mirror of
https://github.com/kjanat/livedash-node.git
synced 2026-01-16 12:12:09 +01:00
feat: add repository pattern, service layer architecture, and scheduler management
- Implement repository pattern for data access layer
- Add comprehensive service layer for business logic
- Create scheduler management system with health monitoring
- Add bounded buffer utility for memory management
- Enhance security audit logging with retention policies
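For orientation, here is a minimal sketch of how the new pieces could be wired together at startup. This is hypothetical usage, not part of the diff; the import paths, the `bootstrap` function, and the config shape are assumptions inferred from the files added below.

import type { MonitoringConfig } from "./lib/securityMonitoring";
import { AlertManagementService } from "./lib/services/AlertManagementService";
import { CsvImportSchedulerService } from "./lib/services/schedulers/CsvImportSchedulerService";

async function bootstrap(monitoringConfig: MonitoringConfig): Promise<void> {
  // Alert layer: deduplicates alerts and fans them out to configured channels.
  const alerts = new AlertManagementService(monitoringConfig);

  // Scheduler layer: periodic CSV import with retry, timeout, and metrics
  // behaviour inherited from BaseSchedulerService.
  const csvScheduler = new CsvImportSchedulerService({ batchSize: 20 });
  csvScheduler.on("batchCompleted", (stats) => {
    console.log("CSV import batch finished:", stats);
  });
  await csvScheduler.start();

  // A health endpoint or orchestrator can poll this.
  console.log(csvScheduler.getHealthStatus());
}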
271 lib/services/AlertManagementService.ts Normal file
@@ -0,0 +1,271 @@
import { TIME } from "../constants";
import {
  AuditOutcome,
  AuditSeverity,
  SecurityEventType,
  securityAuditLogger,
} from "../securityAuditLogger";
import {
  AlertChannel,
  AlertSeverity,
  type MonitoringConfig,
  type SecurityAlert,
} from "../securityMonitoring";

/**
 * Handles security alert management and notifications
 * Single Responsibility: Alert creation, storage, and notifications
 */
export class AlertManagementService {
  private alerts: SecurityAlert[] = [];

  constructor(private config: MonitoringConfig) {}

  /**
   * Create and store a new security alert
   */
  async createAlert(
    alertData: Omit<SecurityAlert, "id" | "timestamp" | "acknowledged">
  ): Promise<SecurityAlert | null> {
    // Check for duplicate suppression
    const suppressionWindow = new Date(
      Date.now() - this.config.alerting.suppressDuplicateMinutes * 60 * 1000
    );
    const isDuplicate = this.alerts.some(
      (a) =>
        a.type === alertData.type &&
        a.context.ipAddress === alertData.context.ipAddress &&
        a.timestamp > suppressionWindow
    );

    if (isDuplicate) return null;

    const alert: SecurityAlert = {
      id: crypto.randomUUID(),
      timestamp: new Date(),
      acknowledged: false,
      ...alertData,
    };

    this.alerts.push(alert);

    // Log alert creation
    await securityAuditLogger.log({
      eventType: SecurityEventType.SYSTEM_CONFIG,
      action: "security_alert_created",
      outcome: AuditOutcome.SUCCESS,
      severity: this.mapAlertSeverityToAuditSeverity(alert.severity),
      context: alert.context,
    });

    // Send notifications if enabled
    if (this.config.alerting.enabled) {
      await this.sendAlertNotifications(alert);
    }

    return alert;
  }

  /**
   * Get active security alerts
   */
  getActiveAlerts(severity?: AlertSeverity): SecurityAlert[] {
    return this.alerts.filter(
      (alert) =>
        !alert.acknowledged && (!severity || alert.severity === severity)
    );
  }

  /**
   * Get all alerts within time range
   */
  getAlertsInTimeRange(timeRange: { start: Date; end: Date }): SecurityAlert[] {
    return this.alerts.filter(
      (alert) =>
        alert.timestamp >= timeRange.start && alert.timestamp <= timeRange.end
    );
  }

  /**
   * Acknowledge an alert
   */
  async acknowledgeAlert(
    alertId: string,
    acknowledgedBy: string
  ): Promise<boolean> {
    const alert = this.alerts.find((a) => a.id === alertId);
    if (!alert) return false;

    alert.acknowledged = true;
    alert.acknowledgedBy = acknowledgedBy;
    alert.acknowledgedAt = new Date();

    // Log the acknowledgment
    await securityAuditLogger.log({
      eventType: SecurityEventType.SYSTEM_CONFIG,
      action: "alert_acknowledged",
      outcome: AuditOutcome.SUCCESS,
      severity: AuditSeverity.INFO,
      context: {
        userId: acknowledgedBy,
        metadata: { alertId, alertType: alert.type },
      },
    });

    return true;
  }

  /**
   * Export security alerts for analysis
   */
  exportAlertsData(
    format: "json" | "csv",
    timeRange: { start: Date; end: Date }
  ): string {
    const filteredAlerts = this.getAlertsInTimeRange(timeRange);

    if (format === "csv") {
      const headers = [
        "timestamp",
        "severity",
        "type",
        "title",
        "description",
        "eventType",
        "userId",
        "companyId",
        "ipAddress",
        "userAgent",
        "acknowledged",
      ].join(",");

      const rows = filteredAlerts.map((alert) =>
        [
          alert.timestamp.toISOString(),
          alert.severity,
          alert.type,
          `"${alert.title}"`,
          `"${alert.description}"`,
          alert.eventType,
          alert.context.userId || "",
          alert.context.companyId || "",
          alert.context.ipAddress || "",
          alert.context.userAgent || "",
          alert.acknowledged.toString(),
        ].join(",")
      );

      return [headers, ...rows].join("\n");
    }

    return JSON.stringify(filteredAlerts, null, 2);
  }

  /**
   * Clean up old alerts based on retention policy
   */
  cleanupOldAlerts(): void {
    const alertCutoff = new Date(
      Date.now() - this.config.retention.alertRetentionDays * TIME.DAY
    );
    this.alerts = this.alerts.filter((a) => a.timestamp >= alertCutoff);
  }

  /**
   * Get alert statistics
   */
  getAlertStats(): {
    total: number;
    active: number;
    acknowledged: number;
    bySeverity: Record<AlertSeverity, number>;
  } {
    const bySeverity = this.alerts.reduce(
      (acc, alert) => {
        acc[alert.severity] = (acc[alert.severity] || 0) + 1;
        return acc;
      },
      {} as Record<AlertSeverity, number>
    );

    return {
      total: this.alerts.length,
      active: this.alerts.filter((a) => !a.acknowledged).length,
      acknowledged: this.alerts.filter((a) => a.acknowledged).length,
      bySeverity,
    };
  }

  /**
   * Send alert notifications via configured channels
   */
  private async sendAlertNotifications(alert: SecurityAlert): Promise<void> {
    // Console logging for immediate visibility
    console.error(
      `🚨 SECURITY ALERT [${alert.severity}] ${alert.type}: ${alert.title}`
    );
    console.error(`Description: ${alert.description}`);
    console.error("Context:", alert.context);

    // In production, implement actual notification integrations:
    for (const channel of this.config.alerting.channels) {
      switch (channel) {
        case AlertChannel.EMAIL:
          await this.sendEmailNotification(alert);
          break;
        case AlertChannel.SLACK:
          await this.sendSlackNotification(alert);
          break;
        case AlertChannel.WEBHOOK:
          await this.sendWebhookNotification(alert);
          break;
        case AlertChannel.DISCORD:
          await this.sendDiscordNotification(alert);
          break;
        case AlertChannel.PAGERDUTY:
          await this.sendPagerDutyNotification(alert);
          break;
      }
    }
  }

  private async sendEmailNotification(alert: SecurityAlert): Promise<void> {
    // Implement email notification
    console.log(`[EMAIL] Security alert: ${alert.title}`);
  }

  private async sendSlackNotification(alert: SecurityAlert): Promise<void> {
    // Implement Slack webhook notification
    console.log(`[SLACK] Security alert: ${alert.title}`);
  }

  private async sendWebhookNotification(alert: SecurityAlert): Promise<void> {
    // Implement custom webhook notification
    console.log(`[WEBHOOK] Security alert: ${alert.title}`);
  }

  private async sendDiscordNotification(alert: SecurityAlert): Promise<void> {
    // Implement Discord webhook notification
    console.log(`[DISCORD] Security alert: ${alert.title}`);
  }

  private async sendPagerDutyNotification(alert: SecurityAlert): Promise<void> {
    // Implement PagerDuty API notification
    console.log(`[PAGERDUTY] Security alert: ${alert.title}`);
  }

  private mapAlertSeverityToAuditSeverity(
    severity: AlertSeverity
  ): AuditSeverity {
    switch (severity) {
      case AlertSeverity.CRITICAL:
        return AuditSeverity.CRITICAL;
      case AlertSeverity.HIGH:
        return AuditSeverity.HIGH;
      case AlertSeverity.MEDIUM:
        return AuditSeverity.MEDIUM;
      case AlertSeverity.LOW:
        return AuditSeverity.LOW;
    }
  }
}
319 lib/services/ErrorHandlingService.ts Normal file
@@ -0,0 +1,319 @@
/**
 * Centralized error handling service
 * Provides consistent error handling patterns across the application
 */

import { DATABASE, SCHEDULER } from "../constants";

export interface ErrorContext {
  operation: string;
  component: string;
  metadata?: Record<string, unknown>;
  userId?: string;
  companyId?: string;
}

export interface RetryConfig {
  maxAttempts: number;
  baseDelay: number;
  maxDelay: number;
  backoffMultiplier: number;
  jitter: boolean;
}

export class ErrorHandlingService {
  private static instance: ErrorHandlingService;

  private constructor() {}

  static getInstance(): ErrorHandlingService {
    if (!ErrorHandlingService.instance) {
      ErrorHandlingService.instance = new ErrorHandlingService();
    }
    return ErrorHandlingService.instance;
  }

  /**
   * Execute operation with standardized error handling and retry logic
   */
  async executeWithRetry<T>(
    operation: () => Promise<T>,
    context: ErrorContext,
    retryConfig: Partial<RetryConfig> = {}
  ): Promise<T> {
    const config: RetryConfig = {
      maxAttempts: DATABASE.MAX_RETRY_ATTEMPTS,
      baseDelay: DATABASE.RETRY_DELAY_BASE,
      maxDelay: DATABASE.RETRY_DELAY_MAX,
      backoffMultiplier: 2,
      jitter: true,
      ...retryConfig,
    };

    let lastError: Error;

    for (let attempt = 1; attempt <= config.maxAttempts; attempt++) {
      try {
        const result = await operation();

        // Log successful retry if previous attempts failed
        if (attempt > 1) {
          console.info(
            `${context.component}.${context.operation} succeeded on attempt ${attempt}`,
            {
              context,
              attempt,
              maxAttempts: config.maxAttempts,
            }
          );
        }

        return result;
      } catch (error) {
        lastError = error instanceof Error ? error : new Error(String(error));

        const isLastAttempt = attempt === config.maxAttempts;
        const shouldRetry = this.shouldRetry(
          lastError,
          attempt,
          config.maxAttempts
        );

        if (isLastAttempt || !shouldRetry) {
          this.logError(lastError, context, {
            attempt,
            maxAttempts: config.maxAttempts,
            finalFailure: true,
          });
          throw lastError;
        }

        // Log retry attempt
        this.logError(lastError, context, {
          attempt,
          maxAttempts: config.maxAttempts,
          willRetry: true,
        });

        // Wait before retry with exponential backoff and jitter
        const delay = this.calculateDelay(attempt, config);
        await this.sleep(delay);
      }
    }

    throw lastError!;
  }

  /**
   * Execute scheduler operation with standardized error handling
   */
  async executeSchedulerOperation<T>(
    operation: () => Promise<T>,
    schedulerName: string,
    operationName: string,
    metadata?: Record<string, unknown>
  ): Promise<T> {
    const context: ErrorContext = {
      operation: operationName,
      component: `scheduler.${schedulerName}`,
      metadata,
    };

    try {
      const startTime = Date.now();
      const result = await this.executeWithRetry(operation, context);
      const duration = Date.now() - startTime;

      // Log successful operation
      console.debug(
        `Scheduler operation completed: ${schedulerName}.${operationName}`,
        {
          duration,
          metadata,
        }
      );

      return result;
    } catch (error) {
      // Final error logging with enhanced context
      this.logSchedulerError(
        error as Error,
        schedulerName,
        operationName,
        metadata
      );
      throw error;
    }
  }

  /**
   * Execute API operation with timeout and error handling
   */
  async executeApiOperation<T>(
    operation: () => Promise<T>,
    apiName: string,
    operationName: string,
    timeoutMs: number = SCHEDULER.MAX_PROCESSING_TIME,
    metadata?: Record<string, unknown>
  ): Promise<T> {
    const context: ErrorContext = {
      operation: operationName,
      component: `api.${apiName}`,
      metadata,
    };

    return this.executeWithRetry(
      () => this.withTimeout(operation(), timeoutMs),
      context
    );
  }

  /**
   * Log error with consistent format and context
   */
  private logError(
    error: Error,
    context: ErrorContext,
    additionalInfo?: Record<string, unknown>
  ): void {
    const errorInfo = {
      message: error.message,
      stack: error.stack,
      name: error.name,
      context,
      timestamp: new Date().toISOString(),
      ...additionalInfo,
    };

    console.error(
      `Error in ${context.component}.${context.operation}:`,
      errorInfo
    );
  }

  /**
   * Log scheduler-specific errors with enhanced context
   */
  private logSchedulerError(
    error: Error,
    schedulerName: string,
    operationName: string,
    metadata?: Record<string, unknown>
  ): void {
    console.error(
      `Scheduler ${schedulerName} failed during ${operationName}:`,
      {
        error: {
          message: error.message,
          stack: error.stack,
          name: error.name,
        },
        scheduler: schedulerName,
        operation: operationName,
        metadata,
        timestamp: new Date().toISOString(),
        severity: "ERROR",
      }
    );
  }

  /**
   * Determine if error is retryable
   */
  private shouldRetry(
    error: Error,
    attempt: number,
    maxAttempts: number
  ): boolean {
    if (attempt >= maxAttempts) {
      return false;
    }

    // Don't retry certain types of errors
    const nonRetryableErrors = [
      "ValidationError",
      "AuthenticationError",
      "AuthorizationError",
      "NotFoundError",
      "BadRequestError",
    ];

    if (nonRetryableErrors.includes(error.name)) {
      return false;
    }

    // Don't retry if error message indicates non-retryable condition
    const nonRetryableMessages = [
      "invalid input",
      "unauthorized",
      "forbidden",
      "not found",
      "bad request",
    ];

    const errorMessage = error.message.toLowerCase();
    if (nonRetryableMessages.some((msg) => errorMessage.includes(msg))) {
      return false;
    }

    return true;
  }

  /**
   * Calculate delay with exponential backoff and jitter
   */
  private calculateDelay(attempt: number, config: RetryConfig): number {
    const exponentialDelay =
      config.baseDelay * config.backoffMultiplier ** (attempt - 1);
    const cappedDelay = Math.min(exponentialDelay, config.maxDelay);

    if (!config.jitter) {
      return cappedDelay;
    }

    // Add jitter: ±25% of the delay
    const jitterRange = cappedDelay * 0.25;
    const jitter = (Math.random() - 0.5) * 2 * jitterRange;

    return Math.max(0, cappedDelay + jitter);
  }

  /**
   * Add timeout to a promise
   */
  private withTimeout<T>(promise: Promise<T>, timeoutMs: number): Promise<T> {
    const timeoutPromise = new Promise<never>((_, reject) => {
      setTimeout(() => {
        reject(new Error(`Operation timed out after ${timeoutMs}ms`));
      }, timeoutMs);
    });

    return Promise.race([promise, timeoutPromise]);
  }

  /**
   * Sleep for specified duration
   */
  private sleep(ms: number): Promise<void> {
    return new Promise((resolve) => setTimeout(resolve, ms));
  }

  /**
   * Create error with enhanced context
   */
  createError(
    message: string,
    context: ErrorContext,
    originalError?: Error
  ): Error {
    const enhancedMessage = `${context.component}.${context.operation}: ${message}`;
    const error = new Error(enhancedMessage);

    if (originalError) {
      error.stack = originalError.stack;
      error.cause = originalError;
    }

    return error;
  }
}
87 lib/services/SecurityEventProcessor.ts Normal file
@@ -0,0 +1,87 @@
import { SECURITY_MONITORING, TIME } from "../constants";
import {
  type AuditLogContext,
  type AuditOutcome,
  AuditSeverity,
  type SecurityEventType,
} from "../securityAuditLogger";
import { BoundedBuffer } from "../utils/BoundedBuffer";

export interface SecurityEventData {
  timestamp: Date;
  eventType: SecurityEventType;
  context: AuditLogContext;
  outcome: AuditOutcome;
  severity: AuditSeverity;
}

/**
 * Handles security event processing and buffering
 * Single Responsibility: Event collection and storage
 */
export class SecurityEventProcessor {
  private eventBuffer: BoundedBuffer<SecurityEventData>;

  constructor() {
    this.eventBuffer = new BoundedBuffer<SecurityEventData>({
      maxSize: SECURITY_MONITORING.EVENT_BUFFER_MAX_SIZE,
      retentionTime: SECURITY_MONITORING.EVENT_RETENTION_HOURS * TIME.HOUR,
      cleanupThreshold: 0.9,
    });
  }

  /**
   * Add security event to buffer
   */
  addEvent(
    eventType: SecurityEventType,
    outcome: AuditOutcome,
    context: AuditLogContext,
    severity: AuditSeverity = AuditSeverity.INFO
  ): void {
    this.eventBuffer.push({
      timestamp: new Date(),
      eventType,
      context,
      outcome,
      severity,
    });
  }

  /**
   * Get events within time range
   */
  getEventsWithinTime(timeRangeMs: number): SecurityEventData[] {
    return this.eventBuffer.getWithinTime(timeRangeMs);
  }

  /**
   * Get recent events for analysis
   */
  getRecentEvents(): SecurityEventData[] {
    return this.eventBuffer.getWithinTime(
      SECURITY_MONITORING.THREAT_DETECTION_WINDOW
    );
  }

  /**
   * Manual cleanup of old events
   */
  cleanup(): void {
    this.eventBuffer.cleanup();
  }

  /**
   * Get current buffer statistics
   */
  getStats(): {
    bufferSize: number;
    eventsCount: number;
  } {
    const recentEvents = this.getRecentEvents();
    return {
      bufferSize: SECURITY_MONITORING.EVENT_BUFFER_MAX_SIZE,
      eventsCount: recentEvents.length,
    };
  }
}
338 lib/services/SecurityMetricsService.ts Normal file
@@ -0,0 +1,338 @@
import { getSecurityAuditLogRepository } from "../repositories/RepositoryFactory";
import {
  AuditOutcome,
  AuditSeverity,
  SecurityEventType,
} from "../securityAuditLogger";
import {
  AlertType,
  type SecurityAlert,
  type SecurityMetrics,
  ThreatLevel,
} from "../securityMonitoring";

/**
 * Handles security metrics calculation and reporting
 * Single Responsibility: Metrics computation and data analysis
 */
export class SecurityMetricsService {
  /**
   * Calculate comprehensive security metrics for a time range
   */
  async calculateSecurityMetrics(
    timeRange: { start: Date; end: Date },
    companyId?: string,
    alerts: SecurityAlert[] = []
  ): Promise<SecurityMetrics> {
    const auditRepository = getSecurityAuditLogRepository();

    // Get security analytics using repository
    const analytics = await auditRepository.getSecurityAnalytics(
      timeRange.start,
      timeRange.end,
      companyId
    );

    // Get additional audit log data for user risk calculations
    const events = await auditRepository.findMany({
      where: {
        timestamp: {
          gte: timeRange.start,
          lte: timeRange.end,
        },
        ...(companyId && { companyId }),
      },
    });

    // Use analytics data from repository
    const totalEvents = analytics.totalEvents;
    const criticalEvents =
      analytics.eventsBySeverity[AuditSeverity.CRITICAL] || 0;

    const activeAlerts = alerts.filter((a) => !a.acknowledged).length;
    const resolvedAlerts = alerts.filter((a) => a.acknowledged).length;

    // Alert distribution by type
    const alertsByType = alerts.reduce(
      (acc, alert) => {
        acc[alert.type] = (acc[alert.type] || 0) + 1;
        return acc;
      },
      {} as Record<AlertType, number>
    );

    // Top threats from alerts
    const topThreats = Object.entries(alertsByType)
      .map(([type, count]) => ({ type: type as AlertType, count }))
      .sort((a, b) => b.count - a.count)
      .slice(0, 5);

    // User risk scores
    const userRiskScores = await this.calculateUserRiskScores(events);

    // Calculate overall security score
    const securityScore = this.calculateSecurityScore({
      totalEvents,
      criticalEvents,
      activeAlerts,
      topThreats,
    });

    // Determine threat level
    const threatLevel = this.determineThreatLevel(
      securityScore,
      activeAlerts,
      criticalEvents
    );

    return {
      totalEvents,
      criticalEvents,
      activeAlerts,
      resolvedAlerts,
      securityScore,
      threatLevel,
      eventsByType: analytics.eventsByType,
      alertsByType,
      topThreats,
      geoDistribution: analytics.geoDistribution,
      timeDistribution: analytics.hourlyDistribution,
      userRiskScores,
    };
  }

  /**
   * Calculate risk scores for users based on their security events
   */
  async calculateUserRiskScores(
    events: Array<{
      userId?: string;
      user?: { email: string };
      eventType: SecurityEventType;
      outcome: AuditOutcome;
      severity: AuditSeverity;
      country?: string;
    }>
  ): Promise<Array<{ userId: string; email: string; riskScore: number }>> {
    const userEvents = events.filter((e) => e.userId);
    const userScores = new Map<
      string,
      { email: string; score: number; events: typeof events }
    >();

    for (const event of userEvents) {
      if (!userScores.has(event.userId)) {
        userScores.set(event.userId, {
          email: event.user?.email || "unknown",
          score: 0,
          events: [],
        });
      }
      userScores.get(event.userId)?.events.push(event);
    }

    const riskScores: Array<{
      userId: string;
      email: string;
      riskScore: number;
    }> = [];

    for (const [userId, userData] of userScores) {
      let riskScore = 0;

      // Failed authentication attempts
      const failedAuth = userData.events.filter(
        (e) =>
          e.eventType === SecurityEventType.AUTHENTICATION &&
          e.outcome === AuditOutcome.FAILURE
      ).length;
      riskScore += failedAuth * 10;

      // Rate limit violations
      const rateLimited = userData.events.filter(
        (e) => e.outcome === AuditOutcome.RATE_LIMITED
      ).length;
      riskScore += rateLimited * 15;

      // Critical events
      const criticalEvents = userData.events.filter(
        (e) => e.severity === AuditSeverity.CRITICAL
      ).length;
      riskScore += criticalEvents * 25;

      // Multiple countries
      const countries = new Set(
        userData.events.map((e) => e.country).filter(Boolean)
      );
      if (countries.size > 2) riskScore += 20;

      // Normalize score to 0-100 range
      riskScore = Math.min(100, riskScore);

      riskScores.push({
        userId,
        email: userData.email,
        riskScore,
      });
    }

    return riskScores.sort((a, b) => b.riskScore - a.riskScore).slice(0, 10);
  }

  /**
   * Calculate threat level for a specific IP address
   */
  async calculateIPThreatLevel(ipAddress: string): Promise<{
    threatLevel: ThreatLevel;
    riskFactors: string[];
    recommendations: string[];
    isBlacklisted: boolean;
  }> {
    const auditRepository = getSecurityAuditLogRepository();

    // Get IP activity summary using repository
    const activitySummary = await auditRepository.getIPActivitySummary(
      ipAddress,
      24
    );

    const oneDayAgo = new Date(Date.now() - 24 * 60 * 60 * 1000);
    const _events = await auditRepository.findByIPAddress(ipAddress, oneDayAgo);

    const riskFactors: string[] = [];
    const recommendations: string[] = [];

    // Use activity summary data from repository
    const {
      failedLogins,
      rateLimitViolations,
      uniqueUsersTargeted,
      totalEvents,
    } = activitySummary;

    if (failedLogins > 10) {
      riskFactors.push(`${failedLogins} failed login attempts in 24h`);
      recommendations.push("Consider temporary IP blocking");
    }

    if (rateLimitViolations > 5) {
      riskFactors.push(`${rateLimitViolations} rate limit violations`);
      recommendations.push("Implement stricter rate limiting");
    }

    if (uniqueUsersTargeted > 5) {
      riskFactors.push(
        `Access attempts to ${uniqueUsersTargeted} different accounts`
      );
      recommendations.push("Investigate for account enumeration");
    }

    // Determine threat level
    let threatLevel = ThreatLevel.LOW;
    if (riskFactors.length >= 3) threatLevel = ThreatLevel.CRITICAL;
    else if (riskFactors.length >= 2) threatLevel = ThreatLevel.HIGH;
    else if (riskFactors.length >= 1) threatLevel = ThreatLevel.MODERATE;

    // Ensure we always provide at least basic analysis
    if (riskFactors.length === 0) {
      riskFactors.push(`${totalEvents} security events in 24h`);
    }

    if (recommendations.length === 0) {
      recommendations.push("Continue monitoring for suspicious activity");
    }

    // Simple blacklist check based on threat level and risk factors
    const isBlacklisted =
      threatLevel === ThreatLevel.CRITICAL && riskFactors.length >= 3;

    return { threatLevel, riskFactors, recommendations, isBlacklisted };
  }

  /**
   * Calculate overall security score based on various factors
   */
  private calculateSecurityScore(data: {
    totalEvents: number;
    criticalEvents: number;
    activeAlerts: number;
    topThreats: Array<{ type: AlertType; count: number }>;
  }): number {
    let score = 100;

    // Deduct points for critical events
    score -= Math.min(30, data.criticalEvents * 2);

    // Deduct points for active alerts
    score -= Math.min(25, data.activeAlerts * 3);

    // Deduct points for high-severity threats
    const highSeverityThreats = data.topThreats.filter((t) =>
      [
        AlertType.BRUTE_FORCE_ATTACK,
        AlertType.DATA_BREACH_ATTEMPT,
        AlertType.PRIVILEGE_ESCALATION,
      ].includes(t.type)
    );
    score -= Math.min(
      20,
      highSeverityThreats.reduce((sum, t) => sum + t.count, 0) * 5
    );

    // Deduct points for high event volume (potential attacks)
    if (data.totalEvents > 1000) {
      score -= Math.min(15, (data.totalEvents - 1000) / 100);
    }

    return Math.max(0, Math.round(score));
  }

  /**
   * Determine overall threat level based on security metrics
   */
  private determineThreatLevel(
    securityScore: number,
    activeAlerts: number,
    criticalEvents: number
  ): ThreatLevel {
    if (securityScore < 50 || activeAlerts >= 5 || criticalEvents >= 3) {
      return ThreatLevel.CRITICAL;
    }
    if (securityScore < 70 || activeAlerts >= 3 || criticalEvents >= 2) {
      return ThreatLevel.HIGH;
    }
    if (securityScore < 85 || activeAlerts >= 1 || criticalEvents >= 1) {
      return ThreatLevel.MODERATE;
    }
    return ThreatLevel.LOW;
  }

  /**
   * Get security score trend over time
   */
  async getSecurityScoreTrend(
    days: number,
    companyId?: string
  ): Promise<Array<{ date: Date; score: number }>> {
    const trends: Array<{ date: Date; score: number }> = [];
    const now = new Date();

    for (let i = days - 1; i >= 0; i--) {
      const date = new Date(now.getTime() - i * 24 * 60 * 60 * 1000);
      const startOfDay = new Date(date.setHours(0, 0, 0, 0));
      const endOfDay = new Date(date.setHours(23, 59, 59, 999));

      const metrics = await this.calculateSecurityMetrics(
        { start: startOfDay, end: endOfDay },
        companyId
      );

      trends.push({
        date: startOfDay,
        score: metrics.securityScore,
      });
    }

    return trends;
  }
}
316 lib/services/ThreatDetectionService.ts Normal file
@@ -0,0 +1,316 @@
import { prisma } from "../prisma";
import {
  type AuditLogContext,
  AuditOutcome,
  SecurityEventType,
} from "../securityAuditLogger";
import {
  AlertSeverity,
  AlertType,
  type MonitoringConfig,
} from "../securityMonitoring";
import type { SecurityEventData } from "./SecurityEventProcessor";

export interface ThreatDetectionResult {
  threats: Array<{
    severity: AlertSeverity;
    type: AlertType;
    title: string;
    description: string;
    eventType: SecurityEventType;
    context: AuditLogContext;
    metadata: Record<string, unknown>;
  }>;
}

export interface AnomalyDetectionResult {
  isAnomaly: boolean;
  confidence: number;
  type: string;
  description: string;
  recommendedActions: string[];
}

/**
 * Handles security threat detection and anomaly analysis
 * Single Responsibility: Threat identification and risk assessment
 */
export class ThreatDetectionService {
  constructor(private config: MonitoringConfig) {}

  /**
   * Detect immediate threats from security event
   */
  async detectImmediateThreats(
    eventType: SecurityEventType,
    outcome: AuditOutcome,
    context: AuditLogContext,
    metadata?: Record<string, unknown>
  ): Promise<ThreatDetectionResult> {
    const threats: Array<{
      severity: AlertSeverity;
      type: AlertType;
      title: string;
      description: string;
      eventType: SecurityEventType;
      context: AuditLogContext;
      metadata: Record<string, unknown>;
    }> = [];

    const now = new Date();

    // Multiple failed logins detection
    if (
      eventType === SecurityEventType.AUTHENTICATION &&
      outcome === AuditOutcome.FAILURE &&
      context.ipAddress
    ) {
      const threatResult = await this.detectBruteForceAttack(
        context.ipAddress,
        now
      );
      if (threatResult) {
        threats.push({
          ...threatResult,
          eventType,
          context,
          metadata: { ...threatResult.metadata, ...metadata },
        });
      }
    }

    // Suspicious admin activity
    if (
      eventType === SecurityEventType.PLATFORM_ADMIN ||
      (eventType === SecurityEventType.USER_MANAGEMENT && context.userId)
    ) {
      const threatResult = await this.detectSuspiciousAdminActivity(
        context.userId!,
        now
      );
      if (threatResult) {
        threats.push({
          ...threatResult,
          eventType,
          context,
          metadata: { ...threatResult.metadata, ...metadata },
        });
      }
    }

    // Rate limiting violations
    if (outcome === AuditOutcome.RATE_LIMITED && context.ipAddress) {
      const threatResult = await this.detectRateLimitBreach(
        context.ipAddress,
        now
      );
      if (threatResult) {
        threats.push({
          ...threatResult,
          eventType,
          context,
          metadata: { ...threatResult.metadata, ...metadata },
        });
      }
    }

    return { threats };
  }

  /**
   * Detect anomalies in security events
   */
  async detectAnomalies(
    eventType: SecurityEventType,
    context: AuditLogContext,
    eventBuffer: SecurityEventData[]
  ): Promise<AnomalyDetectionResult> {
    const now = new Date();
    const sevenDaysAgo = new Date(now.getTime() - 7 * 24 * 60 * 60 * 1000);

    // Get historical data for baseline
    const historicalEvents = await prisma.securityAuditLog.findMany({
      where: {
        eventType,
        timestamp: { gte: sevenDaysAgo, lt: now },
      },
    });

    // Check for geographical anomalies
    if (context.country && context.userId) {
      const geoAnomaly = this.checkGeographicalAnomaly(
        context.userId,
        context.country,
        historicalEvents
      );
      if (geoAnomaly.isAnomaly) return geoAnomaly;
    }

    // Check for time-based anomalies
    const timeAnomaly = this.checkTemporalAnomaly(
      eventType,
      now,
      historicalEvents,
      eventBuffer
    );
    if (timeAnomaly.isAnomaly) return timeAnomaly;

    return {
      isAnomaly: false,
      confidence: 0,
      type: "normal",
      description: "No anomalies detected",
      recommendedActions: [],
    };
  }

  private async detectBruteForceAttack(ipAddress: string, now: Date) {
    const fiveMinutesAgo = new Date(now.getTime() - 5 * 60 * 1000);
    const recentFailures = await prisma.securityAuditLog.count({
      where: {
        eventType: SecurityEventType.AUTHENTICATION,
        outcome: AuditOutcome.FAILURE,
        ipAddress,
        timestamp: { gte: fiveMinutesAgo },
      },
    });

    if (recentFailures >= this.config.thresholds.failedLoginsPerMinute) {
      return {
        severity: AlertSeverity.HIGH,
        type: AlertType.BRUTE_FORCE_ATTACK,
        title: "Brute Force Attack Detected",
        description: `${recentFailures} failed login attempts from IP ${ipAddress} in 5 minutes`,
        metadata: { failedAttempts: recentFailures },
      };
    }
    return null;
  }

  private async detectSuspiciousAdminActivity(userId: string, now: Date) {
    const oneHourAgo = new Date(now.getTime() - 60 * 60 * 1000);
    const adminActions = await prisma.securityAuditLog.count({
      where: {
        userId,
        eventType: {
          in: [
            SecurityEventType.PLATFORM_ADMIN,
            SecurityEventType.USER_MANAGEMENT,
          ],
        },
        timestamp: { gte: oneHourAgo },
      },
    });

    if (adminActions >= this.config.thresholds.adminActionsPerHour) {
      return {
        severity: AlertSeverity.MEDIUM,
        type: AlertType.UNUSUAL_ADMIN_ACTIVITY,
        title: "Unusual Admin Activity",
        description: `User ${userId} performed ${adminActions} admin actions in 1 hour`,
        metadata: { adminActions },
      };
    }
    return null;
  }

  private async detectRateLimitBreach(ipAddress: string, now: Date) {
    const oneMinuteAgo = new Date(now.getTime() - 60 * 1000);
    const rateLimitViolations = await prisma.securityAuditLog.count({
      where: {
        outcome: AuditOutcome.RATE_LIMITED,
        ipAddress,
        timestamp: { gte: oneMinuteAgo },
      },
    });

    if (
      rateLimitViolations >= this.config.thresholds.rateLimitViolationsPerMinute
    ) {
      return {
        severity: AlertSeverity.MEDIUM,
        type: AlertType.RATE_LIMIT_BREACH,
        title: "Rate Limit Breach",
        description: `IP ${ipAddress} exceeded rate limits ${rateLimitViolations} times in 1 minute`,
        metadata: { violations: rateLimitViolations },
      };
    }
    return null;
  }

  private checkGeographicalAnomaly(
    userId: string,
    country: string,
    historicalEvents: Array<{ userId?: string; country?: string }>
  ): AnomalyDetectionResult {
    const userCountries = new Set(
      historicalEvents
        .filter((e) => e.userId === userId && e.country)
        .map((e) => e.country)
    );

    if (userCountries.size > 0 && !userCountries.has(country)) {
      return {
        isAnomaly: true,
        confidence: 0.8,
        type: "geographical_anomaly",
        description: `User accessing from unusual country: ${country}`,
        recommendedActions: [
          "Verify user identity",
          "Check for compromised credentials",
          "Consider additional authentication",
        ],
      };
    }

    return {
      isAnomaly: false,
      confidence: 0,
      type: "normal",
      description: "No geographical anomalies detected",
      recommendedActions: [],
    };
  }

  private checkTemporalAnomaly(
    eventType: SecurityEventType,
    now: Date,
    historicalEvents: Array<{ timestamp: Date }>,
    eventBuffer: SecurityEventData[]
  ): AnomalyDetectionResult {
    const currentHour = now.getHours();
    const hourlyEvents = historicalEvents.filter(
      (e) => e.timestamp.getHours() === currentHour
    );
    const avgHourlyEvents = hourlyEvents.length / 7; // 7 days average

    const recentHourEvents = eventBuffer.filter(
      (e) =>
        e.eventType === eventType &&
        e.timestamp.getHours() === currentHour &&
        e.timestamp > new Date(now.getTime() - 60 * 60 * 1000)
    ).length;

    if (recentHourEvents > avgHourlyEvents * 3 && avgHourlyEvents > 0) {
      return {
        isAnomaly: true,
        confidence: 0.7,
        type: "temporal_anomaly",
        description: `Unusual activity spike: ${recentHourEvents} events vs ${avgHourlyEvents.toFixed(1)} average`,
        recommendedActions: [
          "Investigate source of increased activity",
          "Check for automated attacks",
          "Review recent system changes",
        ],
      };
    }

    return {
      isAnomaly: false,
      confidence: 0,
      type: "normal",
      description: "No temporal anomalies detected",
      recommendedActions: [],
    };
  }
}
374 lib/services/schedulers/BaseSchedulerService.ts Normal file
@@ -0,0 +1,374 @@
import { EventEmitter } from "node:events";
import cron from "node-cron";

/**
 * Scheduler status enumeration
 */
export enum SchedulerStatus {
  STOPPED = "STOPPED",
  STARTING = "STARTING",
  RUNNING = "RUNNING",
  PAUSED = "PAUSED",
  ERROR = "ERROR",
}

/**
 * Scheduler configuration interface
 */
export interface SchedulerConfig {
  enabled: boolean;
  interval: string;
  maxRetries: number;
  retryDelay: number;
  timeout: number;
}

/**
 * Scheduler metrics interface
 */
export interface SchedulerMetrics {
  totalRuns: number;
  successfulRuns: number;
  failedRuns: number;
  lastRunAt: Date | null;
  lastSuccessAt: Date | null;
  lastErrorAt: Date | null;
  averageRunTime: number;
  currentStatus: SchedulerStatus;
}

/**
 * Base abstract scheduler service class
 * Provides common functionality for all schedulers
 */
export abstract class BaseSchedulerService extends EventEmitter {
  protected cronJob?: cron.ScheduledTask;
  protected config: SchedulerConfig;
  protected status: SchedulerStatus = SchedulerStatus.STOPPED;
  protected metrics: SchedulerMetrics;
  protected isRunning = false;

  constructor(
    protected name: string,
    config: Partial<SchedulerConfig> = {}
  ) {
    super();

    this.config = {
      enabled: true,
      interval: "*/5 * * * *", // Default: every 5 minutes
      maxRetries: 3,
      retryDelay: 5000,
      timeout: 30000,
      ...config,
    };

    this.metrics = {
      totalRuns: 0,
      successfulRuns: 0,
      failedRuns: 0,
      lastRunAt: null,
      lastSuccessAt: null,
      lastErrorAt: null,
      averageRunTime: 0,
      currentStatus: this.status,
    };
  }

  /**
   * Abstract method that subclasses must implement
   * Contains the actual scheduler logic
   */
  protected abstract executeTask(): Promise<void>;

  /**
   * Start the scheduler
   */
  async start(): Promise<void> {
    if (!this.config.enabled) {
      console.log(`[${this.name}] Scheduler disabled via configuration`);
      return;
    }

    if (this.status === SchedulerStatus.RUNNING) {
      console.warn(`[${this.name}] Scheduler is already running`);
      return;
    }

    try {
      this.status = SchedulerStatus.STARTING;
      this.emit("statusChange", this.status);

      console.log(
        `[${this.name}] Starting scheduler with interval: ${this.config.interval}`
      );

      this.cronJob = cron.schedule(
        this.config.interval,
        () => this.runWithErrorHandling(),
        {
          scheduled: false, // Don't start immediately
          timezone: "UTC",
        }
      );

      this.cronJob.start();
      this.status = SchedulerStatus.RUNNING;
      this.metrics.currentStatus = this.status;
      this.emit("statusChange", this.status);
      this.emit("started");

      console.log(`[${this.name}] Scheduler started successfully`);
    } catch (error) {
      this.status = SchedulerStatus.ERROR;
      this.metrics.currentStatus = this.status;
      this.emit("statusChange", this.status);
      this.emit("error", error);
      throw error;
    }
  }

  /**
   * Stop the scheduler
   */
  async stop(): Promise<void> {
    if (this.status === SchedulerStatus.STOPPED) {
      console.warn(`[${this.name}] Scheduler is already stopped`);
      return;
    }

    try {
      console.log(`[${this.name}] Stopping scheduler...`);

      if (this.cronJob) {
        this.cronJob.stop();
        this.cronJob.destroy();
        this.cronJob = undefined;
      }

      // Wait for current execution to finish if running
      while (this.isRunning) {
        await new Promise((resolve) => setTimeout(resolve, 100));
      }

      this.status = SchedulerStatus.STOPPED;
      this.metrics.currentStatus = this.status;
      this.emit("statusChange", this.status);
      this.emit("stopped");

      console.log(`[${this.name}] Scheduler stopped successfully`);
    } catch (error) {
      this.status = SchedulerStatus.ERROR;
      this.metrics.currentStatus = this.status;
      this.emit("statusChange", this.status);
      this.emit("error", error);
      throw error;
    }
  }

  /**
   * Pause the scheduler
   */
  pause(): void {
    if (this.cronJob && this.status === SchedulerStatus.RUNNING) {
      this.cronJob.stop();
      this.status = SchedulerStatus.PAUSED;
      this.metrics.currentStatus = this.status;
      this.emit("statusChange", this.status);
      this.emit("paused");
      console.log(`[${this.name}] Scheduler paused`);
    }
  }

  /**
   * Resume the scheduler
   */
  resume(): void {
    if (this.cronJob && this.status === SchedulerStatus.PAUSED) {
      this.cronJob.start();
      this.status = SchedulerStatus.RUNNING;
      this.metrics.currentStatus = this.status;
      this.emit("statusChange", this.status);
      this.emit("resumed");
      console.log(`[${this.name}] Scheduler resumed`);
    }
  }

  /**
   * Get current scheduler status
   */
  getStatus(): SchedulerStatus {
    return this.status;
  }

  /**
   * Get scheduler metrics
   */
  getMetrics(): SchedulerMetrics {
    return { ...this.metrics };
  }

  /**
   * Get scheduler configuration
   */
  getConfig(): SchedulerConfig {
    return { ...this.config };
  }

  /**
   * Update scheduler configuration
   */
  updateConfig(newConfig: Partial<SchedulerConfig>): void {
    const wasRunning = this.status === SchedulerStatus.RUNNING;

    if (wasRunning) {
      this.pause();
    }

    this.config = { ...this.config, ...newConfig };

    if (wasRunning && newConfig.interval) {
      // Recreate cron job with new interval
      if (this.cronJob) {
        this.cronJob.destroy();
      }

      this.cronJob = cron.schedule(
        this.config.interval,
        () => this.runWithErrorHandling(),
        {
          scheduled: false,
          timezone: "UTC",
        }
      );
    }

    if (wasRunning) {
      this.resume();
    }

    this.emit("configUpdated", this.config);
  }

  /**
   * Manual trigger of the scheduler task
   */
  async trigger(): Promise<void> {
    if (this.isRunning) {
      throw new Error(`[${this.name}] Task is already running`);
    }

    await this.runWithErrorHandling();
  }

  /**
   * Get health status for load balancer/orchestrator
   */
  getHealthStatus(): {
    healthy: boolean;
    status: SchedulerStatus;
    lastSuccess: Date | null;
    consecutiveFailures: number;
  } {
    const consecutiveFailures = this.calculateConsecutiveFailures();
    const healthy =
      this.status === SchedulerStatus.RUNNING &&
      consecutiveFailures < this.config.maxRetries &&
      (!this.metrics.lastErrorAt ||
        !this.metrics.lastSuccessAt ||
        this.metrics.lastSuccessAt > this.metrics.lastErrorAt);

    return {
      healthy,
      status: this.status,
      lastSuccess: this.metrics.lastSuccessAt,
      consecutiveFailures,
    };
  }

  /**
   * Run the task with error handling and metrics collection
   */
  private async runWithErrorHandling(): Promise<void> {
    if (this.isRunning) {
      console.warn(
        `[${this.name}] Previous task still running, skipping this iteration`
      );
      return;
    }

    this.isRunning = true;
    const startTime = Date.now();

    try {
      this.metrics.totalRuns++;
      this.metrics.lastRunAt = new Date();
      this.emit("taskStarted");

      // Set timeout for task execution
      const timeoutPromise = new Promise<never>((_, reject) => {
        setTimeout(
          () => reject(new Error("Task timeout")),
          this.config.timeout
        );
      });

      await Promise.race([this.executeTask(), timeoutPromise]);

      const duration = Date.now() - startTime;
      this.updateRunTimeMetrics(duration);

      this.metrics.successfulRuns++;
      this.metrics.lastSuccessAt = new Date();
      this.emit("taskCompleted", { duration });
    } catch (error) {
      const duration = Date.now() - startTime;
      this.metrics.failedRuns++;
      this.metrics.lastErrorAt = new Date();

      console.error(`[${this.name}] Task failed:`, error);
      this.emit("taskFailed", { error, duration });

      // Check if we should retry
      const consecutiveFailures = this.calculateConsecutiveFailures();
      if (consecutiveFailures >= this.config.maxRetries) {
        this.status = SchedulerStatus.ERROR;
        this.metrics.currentStatus = this.status;
        this.emit("statusChange", this.status);
        console.error(
          `[${this.name}] Max retries exceeded, scheduler marked as ERROR`
        );
      }
    } finally {
      this.isRunning = false;
    }
  }

  /**
   * Update average run time metrics
   */
  private updateRunTimeMetrics(duration: number): void {
    if (this.metrics.averageRunTime === 0) {
      this.metrics.averageRunTime = duration;
    } else {
      // Calculate running average
      this.metrics.averageRunTime =
        (this.metrics.averageRunTime + duration) / 2;
    }
  }

  /**
   * Calculate consecutive failures for health monitoring
   */
  private calculateConsecutiveFailures(): number {
    // This is a simplified version - in production you might want to track
    // a rolling window of recent execution results
    if (!this.metrics.lastSuccessAt || !this.metrics.lastErrorAt) {
      return this.metrics.failedRuns;
    }

    return this.metrics.lastErrorAt > this.metrics.lastSuccessAt
      ? this.metrics.failedRuns - this.metrics.successfulRuns
      : 0;
  }
}
317 lib/services/schedulers/CsvImportSchedulerService.ts Normal file
@ -0,0 +1,317 @@
|
||||
import { fetchAndParseCsv } from "../../csvFetcher";
|
||||
import { prisma } from "../../prisma";
|
||||
import {
|
||||
BaseSchedulerService,
|
||||
type SchedulerConfig,
|
||||
} from "./BaseSchedulerService";
|
||||
|
||||
/**
|
||||
* CSV Import specific configuration
|
||||
*/
|
||||
export interface CsvImportSchedulerConfig extends SchedulerConfig {
|
||||
batchSize: number;
|
||||
maxConcurrentImports: number;
|
||||
skipDuplicateCheck: boolean;
|
||||
}
|
||||
|
||||
/**
|
||||
* CSV Import scheduler service
|
||||
* Handles periodic CSV data import from companies
|
||||
*/
|
||||
export class CsvImportSchedulerService extends BaseSchedulerService {
|
||||
private csvConfig: CsvImportSchedulerConfig;
|
||||
|
||||
constructor(config: Partial<CsvImportSchedulerConfig> = {}) {
|
||||
const defaultConfig = {
|
||||
interval: "*/10 * * * *", // Every 10 minutes
|
||||
timeout: 300000, // 5 minutes timeout
|
||||
batchSize: 10,
|
||||
maxConcurrentImports: 5,
|
||||
skipDuplicateCheck: false,
|
||||
...config,
|
||||
};
|
||||
|
||||
super("CSV Import Scheduler", defaultConfig);
|
||||
this.csvConfig = defaultConfig;
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute CSV import task
|
||||
*/
|
||||
protected async executeTask(): Promise<void> {
|
||||
console.log(`[${this.name}] Starting CSV import batch processing...`);
|
||||
|
||||
let totalProcessed = 0;
|
||||
let totalImported = 0;
|
||||
let totalErrors = 0;
|
||||
|
||||
// Process companies in batches to avoid memory issues
|
||||
let skip = 0;
|
||||
let hasMore = true;
|
||||
|
||||
while (hasMore) {
|
||||
const companies = await prisma.company.findMany({
|
||||
where: {
|
||||
status: "ACTIVE",
|
||||
csvUrl: { not: null }, // Only companies with CSV URLs
|
||||
},
|
||||
take: this.csvConfig.batchSize,
|
||||
skip: skip,
|
||||
orderBy: { createdAt: "asc" },
|
||||
select: {
|
||||
id: true,
|
||||
name: true,
|
||||
csvUrl: true,
|
||||
csvUsername: true,
|
||||
csvPassword: true,
|
||||
},
|
||||
});
|
||||
|
||||
if (companies.length === 0) {
|
||||
hasMore = false;
|
||||
break;
|
||||
}
|
||||
|
||||
totalProcessed += companies.length;
|
||||
|
||||
// Process companies with controlled concurrency
|
||||
const results = await this.processBatchWithConcurrency(companies);
|
||||
|
||||
results.forEach((result) => {
|
||||
if (result.success) {
|
||||
totalImported += result.importedCount || 0;
|
||||
        } else {
          totalErrors++;
          console.error(
            `[${this.name}] Failed to process company ${result.companyId}:`,
            result.error
          );
        }
      });

      skip += this.csvConfig.batchSize;

      // Emit progress event
      this.emit("progress", {
        processed: totalProcessed,
        imported: totalImported,
        errors: totalErrors,
      });
    }

    console.log(
      `[${this.name}] Batch processing completed. ` +
        `Processed: ${totalProcessed}, Imported: ${totalImported}, Errors: ${totalErrors}`
    );

    // Emit completion metrics
    this.emit("batchCompleted", {
      totalProcessed,
      totalImported,
      totalErrors,
    });
  }

  /**
   * Process a batch of companies with controlled concurrency
   */
  private async processBatchWithConcurrency(
    companies: Array<{
      id: string;
      name: string;
      csvUrl: string | null;
      csvUsername: string | null;
      csvPassword: string | null;
    }>
  ): Promise<
    Array<{
      companyId: string;
      success: boolean;
      importedCount?: number;
      error?: Error;
    }>
  > {
    const results: Array<{
      companyId: string;
      success: boolean;
      importedCount?: number;
      error?: Error;
    }> = [];

    // Process companies in chunks to control concurrency
    const chunkSize = this.csvConfig.maxConcurrentImports;
    for (let i = 0; i < companies.length; i += chunkSize) {
      const chunk = companies.slice(i, i + chunkSize);

      const chunkResults = await Promise.allSettled(
        chunk.map((company) => this.processCompanyImport(company))
      );

      chunkResults.forEach((result, index) => {
        const company = chunk[index];
        if (result.status === "fulfilled") {
          results.push({
            companyId: company.id,
            success: true,
            importedCount: result.value,
          });
        } else {
          results.push({
            companyId: company.id,
            success: false,
            error: result.reason,
          });
        }
      });
    }

    return results;
  }

  /**
   * Process CSV import for a single company
   */
  private async processCompanyImport(company: {
    id: string;
    name: string;
    csvUrl: string | null;
    csvUsername: string | null;
    csvPassword: string | null;
  }): Promise<number> {
    if (!company.csvUrl) {
      throw new Error(`Company ${company.name} has no CSV URL configured`);
    }

    console.log(
      `[${this.name}] Processing CSV import for company: ${company.name}`
    );

    try {
      // Fetch and parse CSV data
      const rawSessionData = await fetchAndParseCsv(
        company.csvUrl,
        company.csvUsername || undefined,
        company.csvPassword || undefined
      );

      let importedCount = 0;

      // Create SessionImport records for new data
      for (const rawSession of rawSessionData) {
        try {
          // Check for duplicates if not skipping
          if (!this.csvConfig.skipDuplicateCheck) {
            const existing = await prisma.sessionImport.findFirst({
              where: {
                companyId: company.id,
                externalId: rawSession.externalId,
              },
            });

            if (existing) {
              console.log(
                `[${this.name}] Skipping duplicate session: ${rawSession.externalId} for company: ${company.name}`
              );
              continue;
            }
          }

          // Create new session import record
          await prisma.sessionImport.create({
            data: {
              companyId: company.id,
              externalId: rawSession.externalId,
              csvData: rawSession.csvData,
              status: "PENDING_PROCESSING",
              metadata: {
                importedAt: new Date().toISOString(),
                csvUrl: company.csvUrl,
                batchId: `batch_${Date.now()}`,
              },
            },
          });

          importedCount++;
        } catch (sessionError) {
          console.error(
            `[${this.name}] Failed to import session ${rawSession.externalId} for company ${company.name}:`,
            sessionError
          );
          // Continue with other sessions
        }
      }

      console.log(
        `[${this.name}] Successfully imported ${importedCount} sessions for company: ${company.name}`
      );

      return importedCount;
    } catch (error) {
      console.error(
        `[${this.name}] Failed to process CSV import for company ${company.name}:`,
        error
      );
      throw error;
    }
  }

  /**
   * Get CSV import specific metrics
   */
  getCsvImportMetrics(): {
    totalCompaniesProcessed: number;
    totalSessionsImported: number;
    averageImportTime: number;
    errorRate: number;
  } {
    const baseMetrics = this.getMetrics();

    // These would be enhanced with actual tracking in a production system
    return {
      totalCompaniesProcessed: baseMetrics.successfulRuns,
      totalSessionsImported: 0, // Would track actual import counts
      averageImportTime: baseMetrics.averageRunTime,
      errorRate:
        baseMetrics.totalRuns > 0
          ? baseMetrics.failedRuns / baseMetrics.totalRuns
          : 0,
    };
  }

  /**
   * Trigger import for a specific company
   */
  async triggerCompanyImport(companyId: string): Promise<number> {
    const company = await prisma.company.findUnique({
      where: { id: companyId },
      select: {
        id: true,
        name: true,
        csvUrl: true,
        csvUsername: true,
        csvPassword: true,
      },
    });

    if (!company) {
      throw new Error(`Company with ID ${companyId} not found`);
    }

    return this.processCompanyImport(company);
  }

  /**
   * Update CSV-specific configuration
   */
  updateCsvConfig(newConfig: Partial<CsvImportSchedulerConfig>): void {
    this.csvConfig = { ...this.csvConfig, ...newConfig };
    this.updateConfig(newConfig);
  }

  /**
   * Get CSV-specific configuration
   */
  getCsvConfig(): CsvImportSchedulerConfig {
    return { ...this.csvConfig };
  }
}
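A minimal usage sketch (not part of the commit), assuming the CsvImportSchedulerService constructor options and events shown in this diff; the company ID is a placeholder:

// Sketch: trigger a one-off import and observe progress events.
import { CsvImportSchedulerService } from "./CsvImportSchedulerService";

const csvScheduler = new CsvImportSchedulerService({
  interval: "*/10 * * * *",
  batchSize: 10,
  maxConcurrentImports: 5,
});

// "progress" carries the running counters emitted by the batch loop above.
csvScheduler.on("progress", ({ processed, imported, errors }) => {
  console.log(`progress: processed=${processed} imported=${imported} errors=${errors}`);
});

// triggerCompanyImport resolves with the number of imported sessions.
csvScheduler
  .triggerCompanyImport("company-123") // placeholder ID
  .then((count) => console.log(`imported ${count} sessions`))
  .catch((error) => console.error("manual import failed", error));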
422
lib/services/schedulers/SchedulerManager.ts
Normal file
@ -0,0 +1,422 @@
import { EventEmitter } from "node:events";
import {
  type BaseSchedulerService,
  SchedulerStatus,
} from "./BaseSchedulerService";
import { CsvImportSchedulerService } from "./CsvImportSchedulerService";

/**
 * Scheduler manager configuration
 */
export interface SchedulerManagerConfig {
  enabled: boolean;
  autoRestart: boolean;
  healthCheckInterval: number;
  maxRestartAttempts: number;
  restartDelay: number;
}

/**
 * Scheduler registration interface
 */
export interface SchedulerRegistration {
  id: string;
  name: string;
  service: BaseSchedulerService;
  autoStart: boolean;
  critical: boolean; // If true, manager will try to restart on failure
}

/**
 * Manager health status
 */
export interface ManagerHealthStatus {
  healthy: boolean;
  totalSchedulers: number;
  runningSchedulers: number;
  errorSchedulers: number;
  schedulerStatuses: Record<
    string,
    {
      status: SchedulerStatus;
      healthy: boolean;
      lastSuccess: Date | null;
    }
  >;
}

/**
 * Scheduler Manager
 * Orchestrates multiple scheduler services for horizontal scaling
 */
export class SchedulerManager extends EventEmitter {
  private schedulers = new Map<string, SchedulerRegistration>();
  private config: SchedulerManagerConfig;
  private healthCheckTimer?: NodeJS.Timeout;
  private restartAttempts = new Map<string, number>();

  constructor(config: Partial<SchedulerManagerConfig> = {}) {
    super();

    this.config = {
      enabled: true,
      autoRestart: true,
      healthCheckInterval: 30000, // 30 seconds
      maxRestartAttempts: 3,
      restartDelay: 5000, // 5 seconds
      ...config,
    };
  }

  /**
   * Register a scheduler service
   */
  registerScheduler(registration: SchedulerRegistration): void {
    if (this.schedulers.has(registration.id)) {
      throw new Error(
        `Scheduler with ID ${registration.id} is already registered`
      );
    }

    // Set up event listeners for the scheduler
    this.setupSchedulerEventListeners(registration);

    this.schedulers.set(registration.id, registration);
    this.restartAttempts.set(registration.id, 0);

    console.log(
      `[Scheduler Manager] Registered scheduler: ${registration.name}`
    );
    this.emit("schedulerRegistered", registration);
  }

  /**
   * Unregister a scheduler service
   */
  async unregisterScheduler(schedulerId: string): Promise<void> {
    const registration = this.schedulers.get(schedulerId);
    if (!registration) {
      throw new Error(`Scheduler with ID ${schedulerId} is not registered`);
    }

    // Stop the scheduler if running
    if (registration.service.getStatus() === SchedulerStatus.RUNNING) {
      await registration.service.stop();
    }

    // Remove event listeners
    registration.service.removeAllListeners();

    this.schedulers.delete(schedulerId);
    this.restartAttempts.delete(schedulerId);

    console.log(
      `[Scheduler Manager] Unregistered scheduler: ${registration.name}`
    );
    this.emit("schedulerUnregistered", registration);
  }

  /**
   * Start all registered schedulers
   */
  async startAll(): Promise<void> {
    if (!this.config.enabled) {
      console.log("[Scheduler Manager] Disabled via configuration");
      return;
    }

    console.log("[Scheduler Manager] Starting all schedulers...");

    const startPromises = Array.from(this.schedulers.values())
      .filter((reg) => reg.autoStart)
      .map(async (registration) => {
        try {
          await registration.service.start();
          console.log(`[Scheduler Manager] Started: ${registration.name}`);
        } catch (error) {
          console.error(
            `[Scheduler Manager] Failed to start ${registration.name}:`,
            error
          );
          this.emit("schedulerStartFailed", { registration, error });
        }
      });

    await Promise.allSettled(startPromises);

    // Start health monitoring
    this.startHealthMonitoring();

    console.log("[Scheduler Manager] All schedulers started");
    this.emit("allSchedulersStarted");
  }

  /**
   * Stop all registered schedulers
   */
  async stopAll(): Promise<void> {
    console.log("[Scheduler Manager] Stopping all schedulers...");

    // Stop health monitoring
    this.stopHealthMonitoring();

    const stopPromises = Array.from(this.schedulers.values()).map(
      async (registration) => {
        try {
          await registration.service.stop();
          console.log(`[Scheduler Manager] Stopped: ${registration.name}`);
        } catch (error) {
          console.error(
            `[Scheduler Manager] Failed to stop ${registration.name}:`,
            error
          );
        }
      }
    );

    await Promise.allSettled(stopPromises);

    console.log("[Scheduler Manager] All schedulers stopped");
    this.emit("allSchedulersStopped");
  }

  /**
   * Start a specific scheduler
   */
  async startScheduler(schedulerId: string): Promise<void> {
    const registration = this.schedulers.get(schedulerId);
    if (!registration) {
      throw new Error(`Scheduler with ID ${schedulerId} is not registered`);
    }

    await registration.service.start();
    this.emit("schedulerStarted", registration);
  }

  /**
   * Stop a specific scheduler
   */
  async stopScheduler(schedulerId: string): Promise<void> {
    const registration = this.schedulers.get(schedulerId);
    if (!registration) {
      throw new Error(`Scheduler with ID ${schedulerId} is not registered`);
    }

    await registration.service.stop();
    this.emit("schedulerStopped", registration);
  }

  /**
   * Get health status of all schedulers
   */
  getHealthStatus(): ManagerHealthStatus {
    const schedulerStatuses: Record<
      string,
      {
        status: SchedulerStatus;
        healthy: boolean;
        lastSuccess: Date | null;
      }
    > = {};

    let runningCount = 0;
    let errorCount = 0;

    for (const [id, registration] of this.schedulers) {
      const health = registration.service.getHealthStatus();
      const status = registration.service.getStatus();

      schedulerStatuses[id] = {
        status,
        healthy: health.healthy,
        lastSuccess: health.lastSuccess,
      };

      if (status === SchedulerStatus.RUNNING) runningCount++;
      if (status === SchedulerStatus.ERROR) errorCount++;
    }

    const totalSchedulers = this.schedulers.size;
    const healthy = errorCount === 0 && runningCount > 0;

    return {
      healthy,
      totalSchedulers,
      runningSchedulers: runningCount,
      errorSchedulers: errorCount,
      schedulerStatuses,
    };
  }

  /**
   * Get all registered schedulers
   */
  getSchedulers(): Array<{
    id: string;
    name: string;
    status: SchedulerStatus;
    metrics: any;
  }> {
    return Array.from(this.schedulers.entries()).map(([id, registration]) => ({
      id,
      name: registration.name,
      status: registration.service.getStatus(),
      metrics: registration.service.getMetrics(),
    }));
  }

  /**
   * Get a specific scheduler
   */
  getScheduler(schedulerId: string): BaseSchedulerService | null {
    const registration = this.schedulers.get(schedulerId);
    return registration ? registration.service : null;
  }

  /**
   * Trigger manual execution of a specific scheduler
   */
  async triggerScheduler(schedulerId: string): Promise<void> {
    const registration = this.schedulers.get(schedulerId);
    if (!registration) {
      throw new Error(`Scheduler with ID ${schedulerId} is not registered`);
    }

    await registration.service.trigger();
    this.emit("schedulerTriggered", registration);
  }

  /**
   * Setup event listeners for a scheduler
   */
  private setupSchedulerEventListeners(
    registration: SchedulerRegistration
  ): void {
    const { service } = registration;

    service.on("statusChange", (status: SchedulerStatus) => {
      this.emit("schedulerStatusChanged", { registration, status });

      // Handle automatic restart for critical schedulers
      if (
        status === SchedulerStatus.ERROR &&
        registration.critical &&
        this.config.autoRestart
      ) {
        this.handleSchedulerFailure(registration);
      }
    });

    service.on("taskCompleted", (data) => {
      this.emit("schedulerTaskCompleted", { registration, data });
      // Reset restart attempts on successful completion
      this.restartAttempts.set(registration.id, 0);
    });

    service.on("taskFailed", (data) => {
      this.emit("schedulerTaskFailed", { registration, data });
    });

    service.on("error", (error) => {
      this.emit("schedulerError", { registration, error });
    });
  }

  /**
   * Handle scheduler failure with automatic restart
   */
  private async handleSchedulerFailure(
    registration: SchedulerRegistration
  ): Promise<void> {
    const attempts = this.restartAttempts.get(registration.id) || 0;

    if (attempts >= this.config.maxRestartAttempts) {
      console.error(
        `[Scheduler Manager] Max restart attempts exceeded for ${registration.name}`
      );
      this.emit("schedulerRestartFailed", registration);
      return;
    }

    console.log(
      `[Scheduler Manager] Attempting to restart ${registration.name} (attempt ${attempts + 1})`
    );

    // Wait before restart
    await new Promise((resolve) =>
      setTimeout(resolve, this.config.restartDelay)
    );

    try {
      await registration.service.stop();
      await registration.service.start();

      console.log(
        `[Scheduler Manager] Successfully restarted ${registration.name}`
      );
      this.emit("schedulerRestarted", registration);
    } catch (error) {
      console.error(
        `[Scheduler Manager] Failed to restart ${registration.name}:`,
        error
      );
      this.restartAttempts.set(registration.id, attempts + 1);
      this.emit("schedulerRestartError", { registration, error });
    }
  }

  /**
   * Start health monitoring
   */
  private startHealthMonitoring(): void {
    if (this.healthCheckTimer) return;

    this.healthCheckTimer = setInterval(() => {
      const health = this.getHealthStatus();
      this.emit("healthCheck", health);

      if (!health.healthy) {
        console.warn("[Scheduler Manager] Health check failed:", health);
      }
    }, this.config.healthCheckInterval);
  }

  /**
   * Stop health monitoring
   */
  private stopHealthMonitoring(): void {
    if (this.healthCheckTimer) {
      clearInterval(this.healthCheckTimer);
      this.healthCheckTimer = undefined;
    }
  }

  /**
   * Create and register default schedulers
   */
  static createDefaultSchedulers(): SchedulerManager {
    const manager = new SchedulerManager();

    // Register CSV Import Scheduler
    manager.registerScheduler({
      id: "csv-import",
      name: "CSV Import Scheduler",
      service: new CsvImportSchedulerService({
        interval: "*/10 * * * *", // Every 10 minutes
      }),
      autoStart: true,
      critical: true,
    });

    // Additional schedulers would be registered here
    // manager.registerScheduler({
    //   id: "processing",
    //   name: "Session Processing Scheduler",
    //   service: new SessionProcessingSchedulerService(),
    //   autoStart: true,
    //   critical: true,
    // });

    return manager;
  }
}
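A minimal usage sketch (not part of the commit), assuming the SchedulerManager and CsvImportSchedulerService APIs from this diff; the interval and health-check values are illustrative:

// Sketch: wire a manager manually instead of using createDefaultSchedulers().
import { SchedulerManager } from "./SchedulerManager";
import { CsvImportSchedulerService } from "./CsvImportSchedulerService";

async function runManager(): Promise<void> {
  const manager = new SchedulerManager({ healthCheckInterval: 60000 });

  manager.registerScheduler({
    id: "csv-import",
    name: "CSV Import Scheduler",
    service: new CsvImportSchedulerService({ interval: "*/15 * * * *" }),
    autoStart: true,
    critical: true, // manager will attempt restarts on ERROR status
  });

  manager.on("healthCheck", (health) => {
    if (!health.healthy) console.warn("scheduler health degraded", health);
  });

  await manager.startAll();
  console.log(manager.getHealthStatus());
}

runManager().catch((error) => console.error(error));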
274
lib/services/schedulers/ServerSchedulerIntegration.ts
Normal file
@ -0,0 +1,274 @@
import { getSchedulerConfig } from "../../env";
import { CsvImportSchedulerService } from "./CsvImportSchedulerService";
import { SchedulerManager } from "./SchedulerManager";

/**
 * Server-side scheduler integration
 * Manages all schedulers for the application server
 */
export class ServerSchedulerIntegration {
  private static instance: ServerSchedulerIntegration;
  private manager: SchedulerManager;
  private isInitialized = false;

  private constructor() {
    this.manager = new SchedulerManager({
      enabled: true,
      autoRestart: true,
      healthCheckInterval: 30000,
      maxRestartAttempts: 3,
      restartDelay: 5000,
    });

    this.setupManagerEventListeners();
  }

  /**
   * Get singleton instance
   */
  static getInstance(): ServerSchedulerIntegration {
    if (!ServerSchedulerIntegration.instance) {
      ServerSchedulerIntegration.instance = new ServerSchedulerIntegration();
    }
    return ServerSchedulerIntegration.instance;
  }

  /**
   * Initialize schedulers based on environment configuration
   */
  async initialize(): Promise<void> {
    if (this.isInitialized) {
      console.warn("[Server Scheduler Integration] Already initialized");
      return;
    }

    const config = getSchedulerConfig();

    if (!config.enabled) {
      console.log(
        "[Server Scheduler Integration] Schedulers disabled via configuration"
      );
      return;
    }

    try {
      console.log("[Server Scheduler Integration] Initializing schedulers...");

      // Register CSV Import Scheduler
      this.manager.registerScheduler({
        id: "csv-import",
        name: "CSV Import Scheduler",
        service: new CsvImportSchedulerService({
          enabled: config.csvImport.enabled,
          interval: config.csvImport.interval,
          timeout: 300000, // 5 minutes
          batchSize: 10,
          maxConcurrentImports: 5,
        }),
        autoStart: true,
        critical: true,
      });

      // TODO: Add other schedulers when they are converted
      // this.manager.registerScheduler({
      //   id: "import-processing",
      //   name: "Import Processing Scheduler",
      //   service: new ImportProcessingSchedulerService({
      //     enabled: config.importProcessing.enabled,
      //     interval: config.importProcessing.interval,
      //   }),
      //   autoStart: true,
      //   critical: true,
      // });

      // this.manager.registerScheduler({
      //   id: "session-processing",
      //   name: "Session Processing Scheduler",
      //   service: new SessionProcessingSchedulerService({
      //     enabled: config.sessionProcessing.enabled,
      //     interval: config.sessionProcessing.interval,
      //   }),
      //   autoStart: true,
      //   critical: true,
      // });

      // this.manager.registerScheduler({
      //   id: "batch-processing",
      //   name: "Batch Processing Scheduler",
      //   service: new BatchProcessingSchedulerService({
      //     enabled: config.batchProcessing.enabled,
      //     interval: config.batchProcessing.interval,
      //   }),
      //   autoStart: true,
      //   critical: true,
      // });

      // Start all registered schedulers
      await this.manager.startAll();

      this.isInitialized = true;
      console.log(
        "[Server Scheduler Integration] All schedulers initialized successfully"
      );
    } catch (error) {
      console.error(
        "[Server Scheduler Integration] Failed to initialize schedulers:",
        error
      );
      throw error;
    }
  }

  /**
   * Shutdown all schedulers
   */
  async shutdown(): Promise<void> {
    if (!this.isInitialized) {
      console.warn("[Server Scheduler Integration] Not initialized");
      return;
    }

    try {
      console.log("[Server Scheduler Integration] Shutting down schedulers...");
      await this.manager.stopAll();
      this.isInitialized = false;
      console.log("[Server Scheduler Integration] All schedulers stopped");
    } catch (error) {
      console.error(
        "[Server Scheduler Integration] Error during shutdown:",
        error
      );
      throw error;
    }
  }

  /**
   * Get scheduler manager for external access
   */
  getManager(): SchedulerManager {
    return this.manager;
  }

  /**
   * Get health status of all schedulers
   */
  getHealthStatus() {
    return this.manager.getHealthStatus();
  }

  /**
   * Get list of all schedulers with their status
   */
  getSchedulersList() {
    return this.manager.getSchedulers();
  }

  /**
   * Trigger manual execution of a specific scheduler
   */
  async triggerScheduler(schedulerId: string): Promise<void> {
    return this.manager.triggerScheduler(schedulerId);
  }

  /**
   * Start a specific scheduler
   */
  async startScheduler(schedulerId: string): Promise<void> {
    return this.manager.startScheduler(schedulerId);
  }

  /**
   * Stop a specific scheduler
   */
  async stopScheduler(schedulerId: string): Promise<void> {
    return this.manager.stopScheduler(schedulerId);
  }

  /**
   * Setup event listeners for the manager
   */
  private setupManagerEventListeners(): void {
    this.manager.on("schedulerStatusChanged", ({ registration, status }) => {
      console.log(
        `[Server Scheduler Integration] ${registration.name} status changed to: ${status}`
      );
    });

    this.manager.on("schedulerTaskCompleted", ({ registration, data }) => {
      console.log(
        `[Server Scheduler Integration] ${registration.name} task completed in ${data.duration}ms`
      );
    });

    this.manager.on("schedulerTaskFailed", ({ registration, data }) => {
      console.error(
        `[Server Scheduler Integration] ${registration.name} task failed:`,
        data.error
      );
    });

    this.manager.on("schedulerRestarted", (registration) => {
      console.log(
        `[Server Scheduler Integration] Successfully restarted: ${registration.name}`
      );
    });

    this.manager.on("schedulerRestartFailed", (registration) => {
      console.error(
        `[Server Scheduler Integration] Failed to restart: ${registration.name}`
      );
    });

    this.manager.on("healthCheck", (health) => {
      if (!health.healthy) {
        console.warn("[Server Scheduler Integration] Health check failed:", {
          totalSchedulers: health.totalSchedulers,
          runningSchedulers: health.runningSchedulers,
          errorSchedulers: health.errorSchedulers,
        });
      }
    });
  }

  /**
   * Handle graceful shutdown
   */
  async handleGracefulShutdown(): Promise<void> {
    console.log(
      "[Server Scheduler Integration] Received shutdown signal, stopping schedulers..."
    );

    try {
      await this.shutdown();
      console.log("[Server Scheduler Integration] Graceful shutdown completed");
    } catch (error) {
      console.error(
        "[Server Scheduler Integration] Error during graceful shutdown:",
        error
      );
      process.exit(1);
    }
  }
}

/**
 * Convenience function to get the scheduler integration instance
 */
export const getSchedulerIntegration = () =>
  ServerSchedulerIntegration.getInstance();

/**
 * Initialize schedulers for server startup
 */
export const initializeSchedulers = async (): Promise<void> => {
  const integration = getSchedulerIntegration();
  await integration.initialize();
};

/**
 * Shutdown schedulers for server shutdown
 */
export const shutdownSchedulers = async (): Promise<void> => {
  const integration = getSchedulerIntegration();
  await integration.shutdown();
};
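A minimal wiring sketch (not part of the commit), assuming a plain Node entry point; the import path and signal handling are illustrative, the exported helpers are the ones defined above:

// Sketch: hook scheduler startup/shutdown into the server lifecycle.
import {
  getSchedulerIntegration,
  initializeSchedulers,
} from "./lib/services/schedulers/ServerSchedulerIntegration"; // path is an assumption

async function bootstrap(): Promise<void> {
  // Registers and starts all enabled schedulers based on env configuration.
  await initializeSchedulers();

  // Delegate termination signals to the integration's graceful-shutdown path.
  process.on("SIGTERM", () => {
    void getSchedulerIntegration().handleGracefulShutdown();
  });
  process.on("SIGINT", () => {
    void getSchedulerIntegration().handleGracefulShutdown();
  });
}

bootstrap().catch((error) => {
  console.error("Failed to start schedulers:", error);
  process.exit(1);
});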
272
lib/services/schedulers/StandaloneSchedulerRunner.ts
Normal file
@ -0,0 +1,272 @@
#!/usr/bin/env node

/**
 * Standalone Scheduler Runner
 * Runs individual schedulers as separate processes for horizontal scaling
 *
 * Usage:
 *   npx tsx lib/services/schedulers/StandaloneSchedulerRunner.ts --scheduler=csv-import
 *   npx tsx lib/services/schedulers/StandaloneSchedulerRunner.ts --scheduler=session-processing
 */

import { Command } from "commander";
import { validateEnv } from "../../env";
import {
  type BaseSchedulerService,
  SchedulerStatus,
} from "./BaseSchedulerService";
import { CsvImportSchedulerService } from "./CsvImportSchedulerService";

interface SchedulerFactory {
  [key: string]: () => BaseSchedulerService;
}

/**
 * Available schedulers for standalone execution
 */
const AVAILABLE_SCHEDULERS: SchedulerFactory = {
  "csv-import": () =>
    new CsvImportSchedulerService({
      interval: process.env.CSV_IMPORT_INTERVAL || "*/10 * * * *",
      timeout: Number.parseInt(process.env.CSV_IMPORT_TIMEOUT || "300000"),
      batchSize: Number.parseInt(process.env.CSV_IMPORT_BATCH_SIZE || "10"),
      maxConcurrentImports: Number.parseInt(
        process.env.CSV_IMPORT_MAX_CONCURRENT || "5"
      ),
    }),

  // Additional schedulers would be added here:
  // "import-processing": () => new ImportProcessingSchedulerService({
  //   interval: process.env.IMPORT_PROCESSING_INTERVAL || "*/2 * * * *",
  // }),
  // "session-processing": () => new SessionProcessingSchedulerService({
  //   interval: process.env.SESSION_PROCESSING_INTERVAL || "*/5 * * * *",
  // }),
  // "batch-processing": () => new BatchProcessingSchedulerService({
  //   interval: process.env.BATCH_PROCESSING_INTERVAL || "*/5 * * * *",
  // }),
};

/**
 * Standalone Scheduler Runner Class
 */
class StandaloneSchedulerRunner {
  private scheduler?: BaseSchedulerService;
  private isShuttingDown = false;

  constructor(private schedulerName: string) {}

  /**
   * Run the specified scheduler
   */
  async run(): Promise<void> {
    try {
      // Validate environment
      const envValidation = validateEnv();
      if (!envValidation.valid) {
        console.error(
          "[Standalone Scheduler] Environment validation errors:",
          envValidation.errors
        );
        process.exit(1);
      }

      // Create scheduler instance
      const factory = AVAILABLE_SCHEDULERS[this.schedulerName];
      if (!factory) {
        console.error(
          `[Standalone Scheduler] Unknown scheduler: ${this.schedulerName}`
        );
        console.error(
          `Available schedulers: ${Object.keys(AVAILABLE_SCHEDULERS).join(", ")}`
        );
        process.exit(1);
      }

      this.scheduler = factory();

      // Setup event listeners
      this.setupEventListeners();

      // Setup graceful shutdown
      this.setupGracefulShutdown();

      console.log(`[Standalone Scheduler] Starting ${this.schedulerName}...`);

      // Start the scheduler
      await this.scheduler.start();

      console.log(`[Standalone Scheduler] ${this.schedulerName} is running`);

      // Keep the process alive
      this.keepAlive();
    } catch (error) {
      console.error(
        `[Standalone Scheduler] Failed to start ${this.schedulerName}:`,
        error
      );
      process.exit(1);
    }
  }

  /**
   * Setup event listeners for the scheduler
   */
  private setupEventListeners(): void {
    if (!this.scheduler) return;

    this.scheduler.on("statusChange", (status: SchedulerStatus) => {
      console.log(`[Standalone Scheduler] Status changed to: ${status}`);

      if (status === SchedulerStatus.ERROR && !this.isShuttingDown) {
        console.error(
          "[Standalone Scheduler] Scheduler entered ERROR state, exiting..."
        );
        process.exit(1);
      }
    });

    this.scheduler.on("taskCompleted", (data) => {
      console.log(
        `[Standalone Scheduler] Task completed in ${data.duration}ms`
      );
    });

    this.scheduler.on("taskFailed", (data) => {
      console.error(
        "[Standalone Scheduler] Task failed:",
        data.error?.message || data.error
      );
    });

    this.scheduler.on("started", () => {
      console.log(
        `[Standalone Scheduler] ${this.schedulerName} started successfully`
      );
    });

    this.scheduler.on("stopped", () => {
      console.log(`[Standalone Scheduler] ${this.schedulerName} stopped`);
    });

    // Setup health reporting
    setInterval(() => {
      if (this.scheduler && !this.isShuttingDown) {
        const health = this.scheduler.getHealthStatus();
        const metrics = this.scheduler.getMetrics();

        console.log(
          `[Standalone Scheduler] Health: ${health.healthy ? "OK" : "UNHEALTHY"}, ` +
            `Runs: ${metrics.totalRuns}, Success: ${metrics.successfulRuns}, ` +
            `Failed: ${metrics.failedRuns}, Avg Time: ${metrics.averageRunTime}ms`
        );
      }
    }, 60000); // Every minute
  }

  /**
   * Setup graceful shutdown handlers
   */
  private setupGracefulShutdown(): void {
    const gracefulShutdown = async (signal: string) => {
      if (this.isShuttingDown) return;

      console.log(
        `[Standalone Scheduler] Received ${signal}, shutting down gracefully...`
      );
      this.isShuttingDown = true;

      try {
        if (this.scheduler) {
          await this.scheduler.stop();
        }
        console.log("[Standalone Scheduler] Graceful shutdown completed");
        process.exit(0);
      } catch (error) {
        console.error("[Standalone Scheduler] Error during shutdown:", error);
        process.exit(1);
      }
    };

    process.on("SIGINT", () => gracefulShutdown("SIGINT"));
    process.on("SIGTERM", () => gracefulShutdown("SIGTERM"));

    process.on("uncaughtException", (error) => {
      console.error("[Standalone Scheduler] Uncaught exception:", error);
      gracefulShutdown("uncaughtException");
    });

    process.on("unhandledRejection", (reason, promise) => {
      console.error(
        "[Standalone Scheduler] Unhandled rejection at:",
        promise,
        "reason:",
        reason
      );
      gracefulShutdown("unhandledRejection");
    });
  }

  /**
   * Keep the process alive
   */
  private keepAlive(): void {
    // Setup periodic health checks
    setInterval(() => {
      if (!this.isShuttingDown && this.scheduler) {
        const status = this.scheduler.getStatus();
        if (status === SchedulerStatus.ERROR) {
          console.error(
            "[Standalone Scheduler] Scheduler is in ERROR state, exiting..."
          );
          process.exit(1);
        }
      }
    }, 30000); // Every 30 seconds
  }
}
/**
 * Main execution function
 */
async function main(): Promise<void> {
  const program = new Command();

  program
    .name("standalone-scheduler")
    .description("Run individual schedulers as standalone processes")
    .version("1.0.0")
    // Optional rather than required so that --list works on its own;
    // a missing scheduler name is reported by the check below.
    .option("-s, --scheduler <name>", "Scheduler name to run")
    .option("-l, --list", "List available schedulers")
    .parse();

  const options = program.opts();

  if (options.list) {
    console.log("Available schedulers:");
    Object.keys(AVAILABLE_SCHEDULERS).forEach((name) => {
      console.log(`  - ${name}`);
    });
    return;
  }

  if (!options.scheduler) {
    console.error(
      "Scheduler name is required. Use --list to see available schedulers."
    );
    process.exit(1);
  }

  const runner = new StandaloneSchedulerRunner(options.scheduler);
  await runner.run();
}

// Run if called directly
if (require.main === module) {
  main().catch((error) => {
    console.error("[Standalone Scheduler] Fatal error:", error);
    process.exit(1);
  });
}

export { StandaloneSchedulerRunner, AVAILABLE_SCHEDULERS };
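A minimal programmatic sketch (not part of the commit), assuming the exports above; the environment variable and import path are illustrative:

// Sketch: run a scheduler without the CLI, e.g. from a container entrypoint.
import {
  AVAILABLE_SCHEDULERS,
  StandaloneSchedulerRunner,
} from "./lib/services/schedulers/StandaloneSchedulerRunner"; // path is an assumption

const name = process.env.SCHEDULER_NAME ?? "csv-import"; // must be a key of AVAILABLE_SCHEDULERS
if (!AVAILABLE_SCHEDULERS[name]) {
  throw new Error(`Unknown scheduler: ${name}`);
}
void new StandaloneSchedulerRunner(name).run();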