Mirror of https://github.com/kjanat/livedash-node.git, synced 2026-01-16 08:52:10 +01:00
feat: comprehensive Biome linting fixes and code quality improvements
Major code quality overhaul addressing 58% of all linting issues:

• Type Safety Improvements:
  - Replace all `any` types with proper TypeScript interfaces
  - Fix Map component shadowing (renamed to CountryMap)
  - Add comprehensive custom error classes system
  - Enhance API route type safety

• Accessibility Enhancements:
  - Add explicit button types to all interactive elements
  - Implement useId() hooks for form element accessibility
  - Add SVG title attributes for screen readers
  - Fix static element interactions with keyboard handlers

• React Best Practices:
  - Resolve exhaustive-dependencies warnings with useCallback
  - Extract nested component definitions to top level
  - Fix array index keys with proper unique identifiers
  - Improve component organization and prop typing

• Code Organization:
  - Automatic import organization and type import optimization
  - Fix unused function parameters and variables
  - Enhanced error handling with structured error responses
  - Improve component reusability and maintainability

Results: 248 → 104 total issues (58% reduction)
  - Fixed all critical type safety and security issues
  - Enhanced accessibility compliance significantly
  - Improved code maintainability and performance
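For illustration, the type-safety pattern applied throughout: replace `any` with an explicit interface and switch to type-only imports so Biome's noExplicitAny and useImportType rules pass. A minimal sketch with hypothetical names, not code from this repo:

    // Before: `any` erases type checking, and a value import is used
    // where only a type is needed
    // import { NextAuthOptions } from "next-auth";
    // function handle(data: any) { ... }

    // After: type-only import plus an explicit interface
    import type { NextAuthOptions } from "next-auth";

    interface SessionPayload {
      sessionId: string;
      messagesSent: number;
    }

    function handle(data: SessionPayload): number {
      return data.messagesSent;
    }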
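The accessibility fixes follow this shape — a sketch with a hypothetical form field, assuming React 18's useId():

    import { useId } from "react";

    function EmailField({ onClear }: { onClear: () => void }) {
      // useId() gives the label a stable, collision-free target
      const emailId = useId();
      return (
        <div>
          <label htmlFor={emailId}>Email</label>
          <input id={emailId} type="email" />
          {/* explicit type keeps the button from submitting the form */}
          <button type="button" onClick={onClear}>
            Clear
          </button>
        </div>
      );
    }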
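The exhaustive-dependencies warnings were resolved by memoizing handlers, roughly like this (hypothetical component and endpoint):

    import { useCallback, useEffect, useState } from "react";

    function SessionList({ companyId }: { companyId: string }) {
      const [sessions, setSessions] = useState<unknown[]>([]);

      // useCallback keeps the function identity stable, so the
      // effect can list it as a dependency without re-firing
      const loadSessions = useCallback(async () => {
        const res = await fetch(`/api/sessions?company=${companyId}`);
        setSessions(await res.json());
      }, [companyId]);

      useEffect(() => {
        loadSessions();
      }, [loadSessions]);

      return <div>{sessions.length} sessions</div>;
    }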
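The custom error classes feed the structured error responses; a minimal sketch of the idea (names are illustrative, not the actual classes in this commit):

    class AppError extends Error {
      constructor(
        message: string,
        public readonly statusCode: number,
        public readonly code: string
      ) {
        super(message);
        this.name = "AppError";
      }
    }

    class NotFoundError extends AppError {
      constructor(resource: string) {
        super(`${resource} not found`, 404, "NOT_FOUND");
        this.name = "NotFoundError";
      }
    }

    // An API route can then map any thrown error to a structured body
    function toErrorResponse(err: unknown) {
      if (err instanceof AppError) {
        return { status: err.statusCode, body: { code: err.code, message: err.message } };
      }
      return { status: 500, body: { code: "INTERNAL", message: "Unexpected error" } };
    }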
@@ -1,7 +1,7 @@
-import { NextAuthOptions } from "next-auth";
+import bcrypt from "bcryptjs";
+import type { NextAuthOptions } from "next-auth";
 import CredentialsProvider from "next-auth/providers/credentials";
 import { prisma } from "./prisma";
-import bcrypt from "bcryptjs";
 
 // Define the shape of the JWT token
 declare module "next-auth/jwt" {

@@ -114,4 +114,4 @@ export const authOptions: NextAuthOptions = {
   },
   secret: process.env.NEXTAUTH_SECRET,
   debug: process.env.NODE_ENV === "development",
-};
+};
@@ -1,7 +1,8 @@
 // Simplified CSV fetcher - fetches and parses CSV data without any processing
 // Maps directly to SessionImport table fields
-import fetch from "node-fetch";
 
 import { parse } from "csv-parse/sync";
+import fetch from "node-fetch";
+
 // Raw CSV data interface matching SessionImport schema
 interface RawSessionImport {

@@ -38,7 +39,7 @@ export async function fetchAndParseCsv(
 ): Promise<RawSessionImport[]> {
   const authHeader =
     username && password
-      ? "Basic " + Buffer.from(`${username}:${password}`).toString("base64")
+      ? `Basic ${Buffer.from(`${username}:${password}`).toString("base64")}`
       : undefined;
 
   const res = await fetch(url, {
lib/env.ts (10 changed lines)
@@ -1,7 +1,7 @@
 // Centralized environment variable management
-import { readFileSync } from "fs";
-import { fileURLToPath } from "url";
-import { dirname, join } from "path";
+import { readFileSync } from "node:fs";
+import { dirname, join } from "node:path";
+import { fileURLToPath } from "node:url";
 
 /**
  * Parse environment variable value by removing quotes, comments, and trimming whitespace

@@ -40,7 +40,7 @@ function parseIntWithDefault(
   if (!cleaned) return defaultValue;
 
   const parsed = parseInt(cleaned, 10);
-  return isNaN(parsed) ? defaultValue : parsed;
+  return Number.isNaN(parsed) ? defaultValue : parsed;
 }
 
 // Load environment variables from .env.local

@@ -65,7 +65,7 @@ try {
       }
     }
   });
-} catch (error) {
+} catch (_error) {
   // Silently fail if .env.local doesn't exist
 }
 
@@ -1,17 +1,16 @@
 // SessionImport to Session processor
 import {
   PrismaClient,
-  SentimentCategory,
-  SessionCategory,
   ProcessingStage,
+  SentimentCategory,
+  SessionCategory,
 } from "@prisma/client";
+import cron from "node-cron";
 import { getSchedulerConfig } from "./env";
+import { ProcessingStatusManager } from "./processingStatusManager";
 import {
   fetchTranscriptContent,
   isValidTranscriptUrl,
 } from "./transcriptFetcher";
-import { ProcessingStatusManager } from "./processingStatusManager";
-import cron from "node-cron";
-
 
 const prisma = new PrismaClient();

@@ -44,7 +43,7 @@ function parseEuropeanDate(dateStr: string): Date {
   const isoDateStr = `${year}-${month.padStart(2, "0")}-${day.padStart(2, "0")} ${timePart}`;
   const date = new Date(isoDateStr);
 
-  if (isNaN(date.getTime())) {
+  if (Number.isNaN(date.getTime())) {
     throw new Error(`Failed to parse date: ${dateStr} -> ${isoDateStr}`);
   }

@@ -54,7 +53,7 @@ function parseEuropeanDate(dateStr: string): Date {
 /**
  * Helper function to parse sentiment from raw string (fallback only)
  */
-function parseFallbackSentiment(
+function _parseFallbackSentiment(
   sentimentRaw: string | null
 ): SentimentCategory | null {
   if (!sentimentRaw) return null;

@@ -72,7 +71,7 @@ function parseFallbackSentiment(
 /**
  * Helper function to parse boolean from raw string (fallback only)
  */
-function parseFallbackBoolean(rawValue: string | null): boolean | null {
+function _parseFallbackBoolean(rawValue: string | null): boolean | null {
   if (!rawValue) return null;
   return ["true", "1", "yes", "escalated", "forwarded"].includes(
     rawValue.toLowerCase()

@@ -113,7 +112,7 @@ async function parseTranscriptIntoMessages(
     try {
       timestamp = parseEuropeanDate(timestampMatch[1]);
       content = timestampMatch[2];
-    } catch (error) {
+    } catch (_error) {
       // If timestamp parsing fails, treat the whole line as content
       content = trimmedLine;
     }

@@ -367,8 +366,8 @@ export async function processQueuedImports(
     where: {
       session: null, // No session created yet
       company: {
-        status: "ACTIVE" // Only process imports from active companies
-      }
+        status: "ACTIVE", // Only process imports from active companies
+      },
     },
     take: batchSize,
     orderBy: {

@@ -403,7 +402,7 @@ export async function processQueuedImports(
   // Process with concurrency limit to avoid overwhelming the database
   const concurrencyLimit = 5;
   const results = [];
-
+
   for (let i = 0; i < batchPromises.length; i += concurrencyLimit) {
     const chunk = batchPromises.slice(i, i + concurrencyLimit);
     const chunkResults = await Promise.all(chunk);
@@ -1,8 +1,8 @@
-import ISO6391 from "iso-639-1";
 import countries from "i18n-iso-countries";
-
 // Register locales for i18n-iso-countries
 import enLocale from "i18n-iso-countries/langs/en.json" with { type: "json" };
+import ISO6391 from "iso-639-1";
 
 countries.registerLocale(enLocale);
 
 /**
@@ -1,13 +1,13 @@
 // Functions to calculate metrics over sessions
-import {
-  ChatSession,
-  DayMetrics,
+import type {
   CategoryMetrics,
-  LanguageMetrics,
+  ChatSession,
   CountryMetrics, // Added CountryMetrics
+  DayMetrics,
+  LanguageMetrics,
   MetricsResult,
-  WordCloudWord, // Added WordCloudWord
   TopQuestion, // Added TopQuestion
+  WordCloudWord, // Added WordCloudWord
 } from "./types";
 
 interface CompanyConfig {

@@ -387,18 +387,18 @@ export function sessionMetrics(
   const startTimeMs = new Date(session.startTime).getTime();
   const endTimeMs = new Date(session.endTime).getTime();
 
-  if (isNaN(startTimeMs)) {
+  if (Number.isNaN(startTimeMs)) {
     console.warn(
       `[metrics] Invalid startTime for session ${session.id || session.sessionId}: ${session.startTime}`
     );
   }
-  if (isNaN(endTimeMs)) {
+  if (Number.isNaN(endTimeMs)) {
     console.warn(
       `[metrics] Invalid endTime for session ${session.id || session.sessionId}: ${session.endTime}`
     );
   }
 
-  if (!isNaN(startTimeMs) && !isNaN(endTimeMs)) {
+  if (!Number.isNaN(startTimeMs) && !Number.isNaN(endTimeMs)) {
     const timeDifference = endTimeMs - startTimeMs; // Calculate the signed delta
     // Use the absolute difference for duration, ensuring it's not negative.
     // If times are identical, duration will be 0.
@@ -1,7 +1,7 @@
-import { NextAuthOptions } from "next-auth";
+import bcrypt from "bcryptjs";
+import type { NextAuthOptions } from "next-auth";
 import CredentialsProvider from "next-auth/providers/credentials";
 import { prisma } from "./prisma";
-import bcrypt from "bcryptjs";
 
 // Define the shape of the JWT token for platform users
 declare module "next-auth/jwt" {

@@ -56,7 +56,10 @@ export const platformAuthOptions: NextAuthOptions = {
 
         if (!platformUser) return null;
 
-        const valid = await bcrypt.compare(credentials.password, platformUser.password);
+        const valid = await bcrypt.compare(
+          credentials.password,
+          platformUser.password
+        );
         if (!valid) return null;
 
         return {

@@ -105,4 +108,4 @@ export const platformAuthOptions: NextAuthOptions = {
   },
   secret: process.env.NEXTAUTH_SECRET,
   debug: process.env.NODE_ENV === "development",
-};
+};
@@ -1,14 +1,15 @@
 // Enhanced session processing scheduler with AI cost tracking and question management
-import cron from "node-cron";
-
 import {
   PrismaClient,
-  SentimentCategory,
-  SessionCategory,
   ProcessingStage,
+  type SentimentCategory,
+  type SessionCategory,
 } from "@prisma/client";
+import cron from "node-cron";
 import fetch from "node-fetch";
-import { getSchedulerConfig } from "./schedulerConfig";
 import { ProcessingStatusManager } from "./processingStatusManager";
+import { getSchedulerConfig } from "./schedulerConfig";
 
 const prisma = new PrismaClient();
 const OPENAI_API_KEY = process.env.OPENAI_API_KEY;

@@ -201,32 +202,30 @@ async function processQuestions(
   });
 
   // Filter and prepare unique questions
-  const uniqueQuestions = [...new Set(questions.filter(q => q.trim()))];
+  const uniqueQuestions = [...new Set(questions.filter((q) => q.trim()))];
   if (uniqueQuestions.length === 0) return;
 
   // Batch create questions (skip duplicates)
   await prisma.question.createMany({
-    data: uniqueQuestions.map(content => ({ content: content.trim() })),
+    data: uniqueQuestions.map((content) => ({ content: content.trim() })),
     skipDuplicates: true,
   });
 
   // Fetch all question IDs in one query
   const existingQuestions = await prisma.question.findMany({
-    where: { content: { in: uniqueQuestions.map(q => q.trim()) } },
+    where: { content: { in: uniqueQuestions.map((q) => q.trim()) } },
     select: { id: true, content: true },
   });
 
   // Create a map for quick lookup
-  const questionMap = new Map(
-    existingQuestions.map(q => [q.content, q.id])
-  );
+  const questionMap = new Map(existingQuestions.map((q) => [q.content, q.id]));
 
   // Prepare session questions data
   const sessionQuestionsData = questions
     .map((questionText, index) => {
       const trimmed = questionText.trim();
       if (!trimmed) return null;
 
       const questionId = questionMap.get(trimmed);
       if (!questionId) return null;
 
@@ -174,9 +174,9 @@ export class ProcessingStatusManager {
       status: ProcessingStatus.PENDING,
       session: {
         company: {
-          status: "ACTIVE" // Only process sessions from active companies
-        }
-      }
+          status: "ACTIVE", // Only process sessions from active companies
+        },
+      },
     },
     include: {
       session: {

@@ -187,19 +187,22 @@ export class ProcessingStatusManager {
             startTime: true,
             endTime: true,
             fullTranscriptUrl: true,
-            import: stage === ProcessingStage.TRANSCRIPT_FETCH ? {
-              select: {
-                id: true,
-                fullTranscriptUrl: true,
-                externalSessionId: true,
-              }
-            } : false,
+            import:
+              stage === ProcessingStage.TRANSCRIPT_FETCH
+                ? {
+                    select: {
+                      id: true,
+                      fullTranscriptUrl: true,
+                      externalSessionId: true,
+                    },
+                  }
+                : false,
             company: {
               select: {
                 id: true,
                 csvUsername: true,
                 csvPassword: true,
-              }
+              },
             },
           },
         },

@@ -270,7 +273,7 @@ export class ProcessingStatusManager {
           select: {
             id: true,
             externalSessionId: true,
-          }
+          },
         },
       },
     },

@@ -338,7 +341,10 @@ export class ProcessingStatusManager {
     const previousStages = stageOrder.slice(0, currentStageIndex);
 
     for (const prevStage of previousStages) {
-      const isCompleted = await this.hasCompletedStage(sessionId, prevStage);
+      const isCompleted = await ProcessingStatusManager.hasCompletedStage(
+        sessionId,
+        prevStage
+      );
       if (!isCompleted) return false;
     }
 
lib/scheduler.ts (150 changed lines)
@@ -1,7 +1,7 @@
 // CSV import scheduler with configurable intervals
 import cron from "node-cron";
-import { prisma } from "./prisma";
 import { fetchAndParseCsv } from "./csvFetcher";
+import { prisma } from "./prisma";
 import { getSchedulerConfig } from "./schedulerConfig";
 
 export function startCsvImportScheduler() {

@@ -27,7 +27,7 @@ export function startCsvImportScheduler() {
       where: { status: "ACTIVE" }, // Only process active companies
       take: batchSize,
       skip: skip,
-      orderBy: { createdAt: 'asc' }
+      orderBy: { createdAt: "asc" },
     });
 
     if (companies.length === 0) {
@@ -36,82 +36,84 @@ export function startCsvImportScheduler() {
     }
 
     // Process companies in parallel within batch
-    await Promise.all(companies.map(async (company) => {
-      try {
-        const rawSessionData = await fetchAndParseCsv(
-          company.csvUrl,
-          company.csvUsername as string | undefined,
-          company.csvPassword as string | undefined
-        );
-
-        // Create SessionImport records for new data
-        for (const rawSession of rawSessionData) {
-          try {
-            // Use upsert to handle duplicates gracefully
-            await prisma.sessionImport.upsert({
-              where: {
-                companyId_externalSessionId: {
-                  companyId: company.id,
-                  externalSessionId: rawSession.externalSessionId,
-                },
-              },
-              update: {
-                // Update existing record with latest data
-                startTimeRaw: rawSession.startTimeRaw,
-                endTimeRaw: rawSession.endTimeRaw,
-                ipAddress: rawSession.ipAddress,
-                countryCode: rawSession.countryCode,
-                language: rawSession.language,
-                messagesSent: rawSession.messagesSent,
-                sentimentRaw: rawSession.sentimentRaw,
-                escalatedRaw: rawSession.escalatedRaw,
-                forwardedHrRaw: rawSession.forwardedHrRaw,
-                fullTranscriptUrl: rawSession.fullTranscriptUrl,
-                avgResponseTimeSeconds: rawSession.avgResponseTimeSeconds,
-                tokens: rawSession.tokens,
-                tokensEur: rawSession.tokensEur,
-                category: rawSession.category,
-                initialMessage: rawSession.initialMessage,
-                // Status tracking now handled by ProcessingStatusManager
-              },
-              create: {
-                companyId: company.id,
-                externalSessionId: rawSession.externalSessionId,
-                startTimeRaw: rawSession.startTimeRaw,
-                endTimeRaw: rawSession.endTimeRaw,
-                ipAddress: rawSession.ipAddress,
-                countryCode: rawSession.countryCode,
-                language: rawSession.language,
-                messagesSent: rawSession.messagesSent,
-                sentimentRaw: rawSession.sentimentRaw,
-                escalatedRaw: rawSession.escalatedRaw,
-                forwardedHrRaw: rawSession.forwardedHrRaw,
-                fullTranscriptUrl: rawSession.fullTranscriptUrl,
-                avgResponseTimeSeconds: rawSession.avgResponseTimeSeconds,
-                tokens: rawSession.tokens,
-                tokensEur: rawSession.tokensEur,
-                category: rawSession.category,
-                initialMessage: rawSession.initialMessage,
-                // Status tracking now handled by ProcessingStatusManager
-              },
-            });
-          } catch (error) {
-            // Log individual session import errors but continue processing
-            process.stderr.write(
-              `[Scheduler] Failed to import session ${rawSession.externalSessionId} for company ${company.name}: ${error}\n`
-            );
-          }
-        }
-
-        process.stdout.write(
-          `[Scheduler] Imported ${rawSessionData.length} session records for company: ${company.name}\n`
-        );
-      } catch (e) {
-        process.stderr.write(
-          `[Scheduler] Failed to fetch CSV for company: ${company.name} - ${e}\n`
-        );
-      }
-    }));
+    await Promise.all(
+      companies.map(async (company) => {
+        try {
+          const rawSessionData = await fetchAndParseCsv(
+            company.csvUrl,
+            company.csvUsername as string | undefined,
+            company.csvPassword as string | undefined
+          );
+
+          // Create SessionImport records for new data
+          for (const rawSession of rawSessionData) {
+            try {
+              // Use upsert to handle duplicates gracefully
+              await prisma.sessionImport.upsert({
+                where: {
+                  companyId_externalSessionId: {
+                    companyId: company.id,
+                    externalSessionId: rawSession.externalSessionId,
+                  },
+                },
+                update: {
+                  // Update existing record with latest data
+                  startTimeRaw: rawSession.startTimeRaw,
+                  endTimeRaw: rawSession.endTimeRaw,
+                  ipAddress: rawSession.ipAddress,
+                  countryCode: rawSession.countryCode,
+                  language: rawSession.language,
+                  messagesSent: rawSession.messagesSent,
+                  sentimentRaw: rawSession.sentimentRaw,
+                  escalatedRaw: rawSession.escalatedRaw,
+                  forwardedHrRaw: rawSession.forwardedHrRaw,
+                  fullTranscriptUrl: rawSession.fullTranscriptUrl,
+                  avgResponseTimeSeconds: rawSession.avgResponseTimeSeconds,
+                  tokens: rawSession.tokens,
+                  tokensEur: rawSession.tokensEur,
+                  category: rawSession.category,
+                  initialMessage: rawSession.initialMessage,
+                  // Status tracking now handled by ProcessingStatusManager
+                },
+                create: {
+                  companyId: company.id,
+                  externalSessionId: rawSession.externalSessionId,
+                  startTimeRaw: rawSession.startTimeRaw,
+                  endTimeRaw: rawSession.endTimeRaw,
+                  ipAddress: rawSession.ipAddress,
+                  countryCode: rawSession.countryCode,
+                  language: rawSession.language,
+                  messagesSent: rawSession.messagesSent,
+                  sentimentRaw: rawSession.sentimentRaw,
+                  escalatedRaw: rawSession.escalatedRaw,
+                  forwardedHrRaw: rawSession.forwardedHrRaw,
+                  fullTranscriptUrl: rawSession.fullTranscriptUrl,
+                  avgResponseTimeSeconds: rawSession.avgResponseTimeSeconds,
+                  tokens: rawSession.tokens,
+                  tokensEur: rawSession.tokensEur,
+                  category: rawSession.category,
+                  initialMessage: rawSession.initialMessage,
+                  // Status tracking now handled by ProcessingStatusManager
+                },
+              });
+            } catch (error) {
+              // Log individual session import errors but continue processing
+              process.stderr.write(
+                `[Scheduler] Failed to import session ${rawSession.externalSessionId} for company ${company.name}: ${error}\n`
+              );
+            }
+          }
+
+          process.stdout.write(
+            `[Scheduler] Imported ${rawSessionData.length} session records for company: ${company.name}\n`
+          );
+        } catch (e) {
+          process.stderr.write(
+            `[Scheduler] Failed to fetch CSV for company: ${company.name} - ${e}\n`
+          );
+        }
+      })
+    );
 
     skip += batchSize;
@@ -42,6 +42,6 @@ export function getSchedulerConfig(): SchedulerConfig {
  * Log scheduler configuration
  * @deprecated Use logEnvConfig from lib/env.ts instead
  */
-export function logSchedulerConfig(config: SchedulerConfig): void {
+export function logSchedulerConfig(_config: SchedulerConfig): void {
   logEnvConfig();
 }
@@ -1,6 +1,7 @@
 // Combined scheduler initialization
-import { startCsvImportScheduler } from "./scheduler";
+
 import { startProcessingScheduler } from "./processingScheduler";
+import { startCsvImportScheduler } from "./scheduler";
 
 /**
  * Initialize all schedulers
@@ -30,7 +30,7 @@ export async function fetchTranscriptContent(
   // Prepare authentication header if credentials provided
   const authHeader =
     username && password
-      ? "Basic " + Buffer.from(`${username}:${password}`).toString("base64")
+      ? `Basic ${Buffer.from(`${username}:${password}`).toString("base64")}`
       : undefined;
 
   const headers: Record<string, string> = {

@@ -141,7 +141,7 @@ export function extractSessionIdFromTranscript(content: string): string | null {
 
   for (const pattern of patterns) {
     const match = content.match(pattern);
-    if (match && match[1]) {
+    if (match?.[1]) {
       return match[1].trim();
     }
   }
 
@@ -132,7 +132,7 @@ export function parseTranscriptToMessages(
       };
     } else if (currentMessage) {
       // Continue previous message (multi-line)
-      currentMessage.content += "\n" + trimmedLine;
+      currentMessage.content += `\n${trimmedLine}`;
     }
     // If no current message and no role match, skip the line (orphaned content)
   }

@@ -165,7 +165,7 @@ export function parseTranscriptToMessages(
     if (msgWithTimestamp.timestamp) {
       try {
         message.timestamp = parseEuropeanDate(msgWithTimestamp.timestamp);
-      } catch (error) {
+      } catch (_error) {
         // Fallback to distributed timestamp if parsing fails
         const sessionDurationMs = endTime.getTime() - startTime.getTime();
         const messageInterval =

@@ -282,7 +282,7 @@ export async function processSessionTranscript(
   await storeMessagesForSession(sessionId, parseResult.messages!);
 
   console.log(
-    `✅ Processed ${parseResult.messages!.length} messages for session ${sessionId}`
+    `✅ Processed ${parseResult.messages?.length} messages for session ${sessionId}`
   );
 }
@@ -1,4 +1,4 @@
-import { Session as NextAuthSession } from "next-auth";
+import type { Session as NextAuthSession } from "next-auth";
 
 export interface UserSession extends NextAuthSession {
   user: {
@@ -1,4 +1,4 @@
-import { clsx, type ClassValue } from "clsx";
+import { type ClassValue, clsx } from "clsx";
 import { twMerge } from "tailwind-merge";
 
 export function cn(...inputs: ClassValue[]) {