Mirror of https://github.com/kjanat/livedash-node.git, synced 2026-01-16 15:32:10 +01:00
feat: comprehensive Biome linting fixes and code quality improvements
Major code quality overhaul addressing 58% of all linting issues:

• Type Safety Improvements:
  - Replace all any types with proper TypeScript interfaces
  - Fix Map component shadowing (renamed to CountryMap)
  - Add comprehensive custom error classes system
  - Enhance API route type safety

• Accessibility Enhancements:
  - Add explicit button types to all interactive elements
  - Implement useId() hooks for form element accessibility
  - Add SVG title attributes for screen readers
  - Fix static element interactions with keyboard handlers

• React Best Practices:
  - Resolve exhaustive dependencies warnings with useCallback
  - Extract nested component definitions to top level
  - Fix array index keys with proper unique identifiers
  - Improve component organization and prop typing

• Code Organization:
  - Automatic import organization and type import optimization
  - Fix unused function parameters and variables
  - Enhanced error handling with structured error responses
  - Improve component reusability and maintainability

Results: 248 → 104 total issues (58% reduction)
  - Fixed all critical type safety and security issues
  - Enhanced accessibility compliance significantly
  - Improved code maintainability and performance
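As a rough illustration of the "custom error classes" and "structured error responses" items above, here is a minimal sketch of what that pattern can look like. All names in it (ErrorResponse, ApiError, ValidationError, toErrorResponse) are hypothetical stand-ins, not taken from the repository:

// Hypothetical sketch: typed error classes plus a structured error response,
// replacing ad-hoc `any`-typed error handling in API routes.
interface ErrorResponse {
  error: string;
  code: string;
  details?: Record<string, unknown>;
}

class ApiError extends Error {
  constructor(
    message: string,
    public readonly code: string,
    public readonly status: number = 500,
    public readonly details?: Record<string, unknown>
  ) {
    super(message);
    this.name = new.target.name;
  }
}

class ValidationError extends ApiError {
  constructor(message: string, details?: Record<string, unknown>) {
    super(message, "VALIDATION_ERROR", 400, details);
  }
}

// Convert any thrown value into a structured, serializable response body.
function toErrorResponse(error: unknown): { status: number; body: ErrorResponse } {
  if (error instanceof ApiError) {
    return {
      status: error.status,
      body: { error: error.message, code: error.code, details: error.details },
    };
  }
  return {
    status: 500,
    body: { error: "Internal server error", code: "INTERNAL_ERROR" },
  };
}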
lib/scheduler.ts
@@ -1,7 +1,7 @@
 // CSV import scheduler with configurable intervals
 import cron from "node-cron";
-import { prisma } from "./prisma";
 import { fetchAndParseCsv } from "./csvFetcher";
+import { prisma } from "./prisma";
 import { getSchedulerConfig } from "./schedulerConfig";

 export function startCsvImportScheduler() {
@@ -27,7 +27,7 @@ export function startCsvImportScheduler() {
         where: { status: "ACTIVE" }, // Only process active companies
         take: batchSize,
         skip: skip,
-        orderBy: { createdAt: 'asc' }
+        orderBy: { createdAt: "asc" },
       });

       if (companies.length === 0) {
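The query in the hunk above is the paginated "fetch one batch of active companies" step. A minimal sketch of the batching loop around it, as suggested by the batchSize/skip variables here and the `skip += batchSize` line later in this diff (the loop shape, the forEachActiveCompanyBatch name, and the processBatch callback are assumptions; processBatch stands in for the per-batch work shown in the next hunk and sketched after it):

import { prisma } from "./prisma";

// Hypothetical sketch of the batch-pagination loop around the findMany call.
async function forEachActiveCompanyBatch(
  batchSize: number,
  processBatch: (
    companies: Awaited<ReturnType<typeof prisma.company.findMany>>
  ) => Promise<void>
): Promise<void> {
  let skip = 0;

  // Page through active companies until a page comes back empty.
  for (;;) {
    const companies = await prisma.company.findMany({
      where: { status: "ACTIVE" },
      take: batchSize,
      skip: skip,
      orderBy: { createdAt: "asc" },
    });

    if (companies.length === 0) {
      break; // No more companies left to process.
    }

    // Process one page before fetching the next.
    await processBatch(companies);
    skip += batchSize;
  }
}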
@@ -36,82 +36,84 @@ export function startCsvImportScheduler() {
       }

       // Process companies in parallel within batch
-      await Promise.all(companies.map(async (company) => {
-        try {
-          const rawSessionData = await fetchAndParseCsv(
-            company.csvUrl,
-            company.csvUsername as string | undefined,
-            company.csvPassword as string | undefined
-          );
-
-          // Create SessionImport records for new data
-          for (const rawSession of rawSessionData) {
-            try {
-              // Use upsert to handle duplicates gracefully
-              await prisma.sessionImport.upsert({
-                where: {
-                  companyId_externalSessionId: {
-                    companyId: company.id,
-                    externalSessionId: rawSession.externalSessionId,
-                  },
-                },
-                update: {
-                  // Update existing record with latest data
-                  startTimeRaw: rawSession.startTimeRaw,
-                  endTimeRaw: rawSession.endTimeRaw,
-                  ipAddress: rawSession.ipAddress,
-                  countryCode: rawSession.countryCode,
-                  language: rawSession.language,
-                  messagesSent: rawSession.messagesSent,
-                  sentimentRaw: rawSession.sentimentRaw,
-                  escalatedRaw: rawSession.escalatedRaw,
-                  forwardedHrRaw: rawSession.forwardedHrRaw,
-                  fullTranscriptUrl: rawSession.fullTranscriptUrl,
-                  avgResponseTimeSeconds: rawSession.avgResponseTimeSeconds,
-                  tokens: rawSession.tokens,
-                  tokensEur: rawSession.tokensEur,
-                  category: rawSession.category,
-                  initialMessage: rawSession.initialMessage,
-                  // Status tracking now handled by ProcessingStatusManager
-                },
-                create: {
-                  companyId: company.id,
-                  externalSessionId: rawSession.externalSessionId,
-                  startTimeRaw: rawSession.startTimeRaw,
-                  endTimeRaw: rawSession.endTimeRaw,
-                  ipAddress: rawSession.ipAddress,
-                  countryCode: rawSession.countryCode,
-                  language: rawSession.language,
-                  messagesSent: rawSession.messagesSent,
-                  sentimentRaw: rawSession.sentimentRaw,
-                  escalatedRaw: rawSession.escalatedRaw,
-                  forwardedHrRaw: rawSession.forwardedHrRaw,
-                  fullTranscriptUrl: rawSession.fullTranscriptUrl,
-                  avgResponseTimeSeconds: rawSession.avgResponseTimeSeconds,
-                  tokens: rawSession.tokens,
-                  tokensEur: rawSession.tokensEur,
-                  category: rawSession.category,
-                  initialMessage: rawSession.initialMessage,
-                  // Status tracking now handled by ProcessingStatusManager
-                },
-              });
-            } catch (error) {
-              // Log individual session import errors but continue processing
-              process.stderr.write(
-                `[Scheduler] Failed to import session ${rawSession.externalSessionId} for company ${company.name}: ${error}\n`
-              );
-            }
-          }
-
-          process.stdout.write(
-            `[Scheduler] Imported ${rawSessionData.length} session records for company: ${company.name}\n`
-          );
-        } catch (e) {
-          process.stderr.write(
-            `[Scheduler] Failed to fetch CSV for company: ${company.name} - ${e}\n`
-          );
-        }
-      }));
+      await Promise.all(
+        companies.map(async (company) => {
+          try {
+            const rawSessionData = await fetchAndParseCsv(
+              company.csvUrl,
+              company.csvUsername as string | undefined,
+              company.csvPassword as string | undefined
+            );
+
+            // Create SessionImport records for new data
+            for (const rawSession of rawSessionData) {
+              try {
+                // Use upsert to handle duplicates gracefully
+                await prisma.sessionImport.upsert({
+                  where: {
+                    companyId_externalSessionId: {
+                      companyId: company.id,
+                      externalSessionId: rawSession.externalSessionId,
+                    },
+                  },
+                  update: {
+                    // Update existing record with latest data
+                    startTimeRaw: rawSession.startTimeRaw,
+                    endTimeRaw: rawSession.endTimeRaw,
+                    ipAddress: rawSession.ipAddress,
+                    countryCode: rawSession.countryCode,
+                    language: rawSession.language,
+                    messagesSent: rawSession.messagesSent,
+                    sentimentRaw: rawSession.sentimentRaw,
+                    escalatedRaw: rawSession.escalatedRaw,
+                    forwardedHrRaw: rawSession.forwardedHrRaw,
+                    fullTranscriptUrl: rawSession.fullTranscriptUrl,
+                    avgResponseTimeSeconds: rawSession.avgResponseTimeSeconds,
+                    tokens: rawSession.tokens,
+                    tokensEur: rawSession.tokensEur,
+                    category: rawSession.category,
+                    initialMessage: rawSession.initialMessage,
+                    // Status tracking now handled by ProcessingStatusManager
+                  },
+                  create: {
+                    companyId: company.id,
+                    externalSessionId: rawSession.externalSessionId,
+                    startTimeRaw: rawSession.startTimeRaw,
+                    endTimeRaw: rawSession.endTimeRaw,
+                    ipAddress: rawSession.ipAddress,
+                    countryCode: rawSession.countryCode,
+                    language: rawSession.language,
+                    messagesSent: rawSession.messagesSent,
+                    sentimentRaw: rawSession.sentimentRaw,
+                    escalatedRaw: rawSession.escalatedRaw,
+                    forwardedHrRaw: rawSession.forwardedHrRaw,
+                    fullTranscriptUrl: rawSession.fullTranscriptUrl,
+                    avgResponseTimeSeconds: rawSession.avgResponseTimeSeconds,
+                    tokens: rawSession.tokens,
+                    tokensEur: rawSession.tokensEur,
+                    category: rawSession.category,
+                    initialMessage: rawSession.initialMessage,
+                    // Status tracking now handled by ProcessingStatusManager
+                  },
+                });
+              } catch (error) {
+                // Log individual session import errors but continue processing
+                process.stderr.write(
+                  `[Scheduler] Failed to import session ${rawSession.externalSessionId} for company ${company.name}: ${error}\n`
+                );
+              }
+            }
+
+            process.stdout.write(
+              `[Scheduler] Imported ${rawSessionData.length} session records for company: ${company.name}\n`
+            );
+          } catch (e) {
+            process.stderr.write(
+              `[Scheduler] Failed to fetch CSV for company: ${company.name} - ${e}\n`
+            );
+          }
+        })
+      );

       skip += batchSize;
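The hunk above is a formatting pass over the per-batch import logic; the underlying pattern is unchanged: process every company in the batch in parallel, and upsert each imported row on the (companyId, externalSessionId) compound key so re-imports update the existing record instead of creating duplicates. A condensed sketch of that shape, defining the processBatch helper referenced earlier (CompanyLike and the trimmed field list are assumptions; the real scheduler copies the full set of raw CSV columns in both upsert branches):

import { prisma } from "./prisma";
import { fetchAndParseCsv } from "./csvFetcher";

// Minimal structural type for what this sketch needs from a company row;
// the real Prisma model has more fields.
interface CompanyLike {
  id: string;
  name: string;
  csvUrl: string;
  csvUsername: string | null;
  csvPassword: string | null;
}

// Per-batch work: fetch each company's CSV and upsert every session row.
// A failing company logs an error but does not abort the rest of the batch.
async function processBatch(companies: CompanyLike[]): Promise<void> {
  await Promise.all(
    companies.map(async (company) => {
      try {
        const rawSessionData = await fetchAndParseCsv(
          company.csvUrl,
          company.csvUsername ?? undefined,
          company.csvPassword ?? undefined
        );

        for (const rawSession of rawSessionData) {
          // Upsert on the compound unique key so a re-imported session is
          // updated in place rather than duplicated.
          await prisma.sessionImport.upsert({
            where: {
              companyId_externalSessionId: {
                companyId: company.id,
                externalSessionId: rawSession.externalSessionId,
              },
            },
            update: {
              startTimeRaw: rawSession.startTimeRaw,
              endTimeRaw: rawSession.endTimeRaw,
            },
            create: {
              companyId: company.id,
              externalSessionId: rawSession.externalSessionId,
              startTimeRaw: rawSession.startTimeRaw,
              endTimeRaw: rawSession.endTimeRaw,
            },
          });
        }
      } catch (e) {
        process.stderr.write(
          `[Scheduler] Failed to fetch CSV for company: ${company.name} - ${e}\n`
        );
      }
    })
  );
}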