Mirror of https://github.com/kjanat/livedash-node.git (synced 2026-01-16 12:32:10 +01:00)

feat: add repository pattern, service layer architecture, and scheduler management

- Implement repository pattern for data access layer
- Add comprehensive service layer for business logic
- Create scheduler management system with health monitoring
- Add bounded buffer utility for memory management
- Enhance security audit logging with retention policies
@@ -1,3 +1,4 @@
+import type { Prisma } from "@prisma/client";
 import { type NextRequest, NextResponse } from "next/server";
 import { getServerSession } from "next-auth/next";
 import { authOptions } from "../../../../lib/auth";
@@ -5,7 +6,9 @@ import { prisma } from "../../../../lib/prisma";
 import { extractClientIP } from "../../../../lib/rateLimiter";
 import {
   AuditOutcome,
+  type AuditSeverity,
   createAuditMetadata,
+  type SecurityEventType,
   securityAuditLogger,
 } from "../../../../lib/securityAuditLogger";
@@ -89,26 +92,16 @@ function parseAuditLogFilters(url: URL) {
 function buildAuditLogWhereClause(
   companyId: string,
   filters: ReturnType<typeof parseAuditLogFilters>
-) {
+): Prisma.SecurityAuditLogWhereInput {
   const { eventType, outcome, severity, userId, startDate, endDate } = filters;
 
-  const where: {
-    companyId: string;
-    eventType?: string;
-    outcome?: string;
-    severity?: string;
-    userId?: string;
-    timestamp?: {
-      gte?: Date;
-      lte?: Date;
-    };
-  } = {
+  const where: Prisma.SecurityAuditLogWhereInput = {
     companyId, // Only show logs for user's company
   };
 
-  if (eventType) where.eventType = eventType;
-  if (outcome) where.outcome = outcome;
-  if (severity) where.severity = severity;
+  if (eventType) where.eventType = eventType as SecurityEventType;
+  if (outcome) where.outcome = outcome as AuditOutcome;
+  if (severity) where.severity = severity as AuditSeverity;
   if (userId) where.userId = userId;
 
   if (startDate || endDate) {
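The typed return value lets the clause flow straight into the Prisma query without widening. A minimal usage sketch (the pagination values are illustrative):

```typescript
// Hypothetical call site: the typed where clause plugs directly into findMany.
const logs = await prisma.securityAuditLog.findMany({
  where: buildAuditLogWhereClause(companyId, filters),
  orderBy: { timestamp: "desc" },
  take: 50, // illustrative page size
});
```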
app/api/admin/schedulers/health/route.ts (new file, 61 lines)
@@ -0,0 +1,61 @@
import { NextResponse } from "next/server";
import { getSchedulerIntegration } from "@/lib/services/schedulers/ServerSchedulerIntegration";

/**
 * Health check endpoint for schedulers
 * Used by load balancers and orchestrators for health monitoring
 */
export async function GET() {
  try {
    const integration = getSchedulerIntegration();
    const health = integration.getHealthStatus();

    // Return appropriate HTTP status based on health
    const status = health.healthy ? 200 : 503;

    return NextResponse.json(
      {
        healthy: health.healthy,
        status: health.healthy ? "healthy" : "unhealthy",
        timestamp: new Date().toISOString(),
        schedulers: {
          total: health.totalSchedulers,
          running: health.runningSchedulers,
          errors: health.errorSchedulers,
        },
        details: health.schedulerStatuses,
      },
      { status }
    );
  } catch (error) {
    console.error("[Scheduler Health API] Error:", error);

    return NextResponse.json(
      {
        healthy: false,
        status: "error",
        timestamp: new Date().toISOString(),
        error: "Failed to get scheduler health status",
      },
      { status: 500 }
    );
  }
}

/**
 * Readiness check endpoint
 * Used by Kubernetes and other orchestrators
 */
export async function HEAD() {
  try {
    const integration = getSchedulerIntegration();
    const health = integration.getHealthStatus();

    // Return 200 if healthy, 503 if not
    const status = health.healthy ? 200 : 503;

    return new NextResponse(null, { status });
  } catch (_error) {
    return new NextResponse(null, { status: 500 });
  }
}
app/api/admin/schedulers/route.ts (new file, 131 lines)
@@ -0,0 +1,131 @@
import { type NextRequest, NextResponse } from "next/server";
import { getSchedulerIntegration } from "@/lib/services/schedulers/ServerSchedulerIntegration";

/**
 * Get all schedulers with their status and metrics
 */
export async function GET() {
  try {
    const integration = getSchedulerIntegration();
    const schedulers = integration.getSchedulersList();
    const health = integration.getHealthStatus();

    return NextResponse.json({
      success: true,
      data: {
        health,
        schedulers,
        timestamp: new Date().toISOString(),
      },
    });
  } catch (error) {
    console.error("[Scheduler Management API] GET Error:", error);

    return NextResponse.json(
      {
        success: false,
        error: "Failed to get scheduler information",
        timestamp: new Date().toISOString(),
      },
      { status: 500 }
    );
  }
}

/**
 * Control scheduler operations (start/stop/trigger)
 */
export async function POST(request: NextRequest) {
  try {
    const body = await request.json();
    const { action, schedulerId } = body;

    if (!action) {
      return NextResponse.json(
        {
          success: false,
          error: "Action is required",
        },
        { status: 400 }
      );
    }

    const integration = getSchedulerIntegration();

    switch (action) {
      case "start":
        if (!schedulerId) {
          return NextResponse.json(
            {
              success: false,
              error: "schedulerId is required for start action",
            },
            { status: 400 }
          );
        }
        await integration.startScheduler(schedulerId);
        break;

      case "stop":
        if (!schedulerId) {
          return NextResponse.json(
            {
              success: false,
              error: "schedulerId is required for stop action",
            },
            { status: 400 }
          );
        }
        await integration.stopScheduler(schedulerId);
        break;

      case "trigger":
        if (!schedulerId) {
          return NextResponse.json(
            {
              success: false,
              error: "schedulerId is required for trigger action",
            },
            { status: 400 }
          );
        }
        await integration.triggerScheduler(schedulerId);
        break;

      case "startAll":
        await integration.getManager().startAll();
        break;

      case "stopAll":
        await integration.getManager().stopAll();
        break;

      default:
        return NextResponse.json(
          {
            success: false,
            error: `Unknown action: ${action}`,
          },
          { status: 400 }
        );
    }

    return NextResponse.json({
      success: true,
      message: `Action '${action}' completed successfully`,
      timestamp: new Date().toISOString(),
    });
  } catch (error) {
    console.error("[Scheduler Management API] POST Error:", error);

    return NextResponse.json(
      {
        success: false,
        error:
          error instanceof Error ? error.message : "Unknown error occurred",
        timestamp: new Date().toISOString(),
      },
      { status: 500 }
    );
  }
}
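A minimal client-side sketch of driving this control endpoint (the scheduler id is illustrative; the payload shape matches the POST handler above):

```typescript
// Trigger one scheduler run via the management API.
const res = await fetch("/api/admin/schedulers", {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({ action: "trigger", schedulerId: "csv-import" }),
});
const result = await res.json();
if (!result.success) throw new Error(result.error);
```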
@@ -22,8 +22,8 @@ function convertToMockChatSession(
   sentiment: string | null;
   messagesSent: number | null;
   avgResponseTime: number | null;
-  escalated: boolean;
-  forwardedHr: boolean;
+  escalated: boolean | null;
+  forwardedHr: boolean | null;
   initialMsg: string | null;
   fullTranscriptUrl: string | null;
   summary: string | null;
@@ -17,8 +17,8 @@ function mapPrismaSessionToChatSession(prismaSession: {
   sentiment: string | null;
   messagesSent: number | null;
   avgResponseTime: number | null;
-  escalated: boolean;
-  forwardedHr: boolean;
+  escalated: boolean | null;
+  forwardedHr: boolean | null;
   initialMsg: string | null;
   fullTranscriptUrl: string | null;
   summary: string | null;
@@ -55,8 +55,8 @@ function mapPrismaSessionToChatSession(prismaSession: {
   sentiment: prismaSession.sentiment ?? null,
   messagesSent: prismaSession.messagesSent ?? undefined, // Maintain consistency with other nullable fields
   avgResponseTime: prismaSession.avgResponseTime ?? null,
-  escalated: prismaSession.escalated,
-  forwardedHr: prismaSession.forwardedHr,
+  escalated: prismaSession.escalated ?? false,
+  forwardedHr: prismaSession.forwardedHr ?? false,
   initialMsg: prismaSession.initialMsg ?? undefined,
   fullTranscriptUrl: prismaSession.fullTranscriptUrl ?? undefined,
   summary: prismaSession.summary ?? undefined, // New field
@@ -1,6 +1,6 @@
 import type { CompanyStatus } from "@prisma/client";
 import { type NextRequest, NextResponse } from "next/server";
-import { getServerSession } from "next-auth";
+import { getServerSession, type Session } from "next-auth";
 import { platformAuthOptions } from "../../../../lib/platform-auth";
 import { prisma } from "../../../../lib/prisma";
 import { extractClientIP } from "../../../../lib/rateLimiter";
@@ -12,7 +12,7 @@ import {
 
 // GET /api/platform/companies - List all companies
 export async function GET(request: NextRequest) {
-  let session: any = null;
+  let session: Session | null = null;
 
   try {
     session = await getServerSession(platformAuthOptions);
@@ -139,7 +139,7 @@ export async function GET(request: NextRequest) {
 
 // POST /api/platform/companies - Create new company
 export async function POST(request: NextRequest) {
-  let session: any = null;
+  let session: Session | null = null;
 
   try {
     session = await getServerSession(platformAuthOptions);
@@ -229,7 +229,7 @@ export async function POST(request: NextRequest) {
     name: adminName,
     role: "ADMIN",
     companyId: company.id,
-    invitedBy: session.user.email || "platform",
+    invitedBy: session?.user?.email || "platform",
     invitedAt: new Date(),
   },
 });
@@ -15,7 +15,7 @@ export default function CompanySettingsPage() {
   const csvUsernameId = useId();
   const csvPasswordId = useId();
   const { data: session, status } = useSession();
-  const [_company, setCompany] = useState<Company | null>(null);
+  const [, setCompany] = useState<Company | null>(null);
   const [csvUrl, setCsvUrl] = useState<string>("");
   const [csvUsername, setCsvUsername] = useState<string>("");
   const [csvPassword, setCsvPassword] = useState<string>("");
@@ -470,7 +470,7 @@ function DashboardContent() {
   const { data: session, status } = useSession();
   const router = useRouter();
   const [metrics, setMetrics] = useState<MetricsResult | null>(null);
-  const [company, _setCompany] = useState<Company | null>(null);
+  const [company] = useState<Company | null>(null);
   const [refreshing, setRefreshing] = useState<boolean>(false);
   const [isInitialLoad, setIsInitialLoad] = useState<boolean>(true);
@@ -505,27 +505,28 @@ function DashboardContent() {
   avgSessionLength: null,
   days: {},
   languages: {},
   categories: {},
   countries: {},
   belowThresholdCount: 0,
   // Map sentiment data to individual counts
   sentimentPositiveCount:
     overviewData.sentimentDistribution?.find(
-      (s) => s.sentiment === "positive"
+      (s) => s.sentiment === "POSITIVE"
     )?.count || 0,
   sentimentNeutralCount:
     overviewData.sentimentDistribution?.find(
-      (s) => s.sentiment === "neutral"
+      (s) => s.sentiment === "NEUTRAL"
     )?.count || 0,
   sentimentNegativeCount:
     overviewData.sentimentDistribution?.find(
-      (s) => s.sentiment === "negative"
+      (s) => s.sentiment === "NEGATIVE"
     )?.count || 0,
   // Map category data to CategoryMetrics format
   ...(overviewData.categoryDistribution && {
     categories: overviewData.categoryDistribution.reduce(
       (acc, item) => {
-        acc[item.category] = item.count;
+        if (item.category) {
+          acc[item.category] = item.count;
+        }
         return acc;
       },
       {} as Record<string, number>
@@ -32,21 +32,21 @@ interface FilterOptions {
 
 interface FilterSectionProps {
   filtersExpanded: boolean;
-  setFiltersExpanded: (_expanded: boolean) => void;
+  setFiltersExpanded: (expanded: boolean) => void;
   searchTerm: string;
-  setSearchTerm: (_term: string) => void;
+  setSearchTerm: (term: string) => void;
   selectedCategory: string;
-  setSelectedCategory: (_category: string) => void;
+  setSelectedCategory: (category: string) => void;
   selectedLanguage: string;
-  setSelectedLanguage: (_language: string) => void;
+  setSelectedLanguage: (language: string) => void;
   startDate: string;
-  setStartDate: (_date: string) => void;
+  setStartDate: (date: string) => void;
   endDate: string;
-  setEndDate: (_date: string) => void;
+  setEndDate: (date: string) => void;
   sortKey: string;
-  setSortKey: (_key: string) => void;
+  setSortKey: (key: string) => void;
   sortOrder: string;
-  setSortOrder: (_order: string) => void;
+  setSortOrder: (order: string) => void;
   filterOptions: FilterOptions;
   searchHeadingId: string;
   searchId: string;
@@ -392,7 +392,7 @@ function SessionList({
 interface PaginationProps {
   currentPage: number;
   totalPages: number;
-  setCurrentPage: (_page: number | ((_prev: number) => number)) => void;
+  setCurrentPage: (page: number | ((prev: number) => number)) => void;
 }
 
 function Pagination({
@@ -505,7 +505,7 @@ export default function CompanyManagement() {
   }
 
   fetchCompany();
-}, [status, session?.user?.isPlatformUser, fetchCompany, router.push]);
+}, [status, session?.user?.isPlatformUser, fetchCompany, router]);
 
 const handleSave = async () => {
   state.setIsSaving(true);
@@ -141,7 +141,7 @@ function usePlatformDashboardState() {
 const [copiedEmail, setCopiedEmail] = useState(false);
 const [copiedPassword, setCopiedPassword] = useState(false);
 const [searchTerm, setSearchTerm] = useState("");
-const [newCompanyData, setNewCompanyData] = useState({
+const [newCompanyData, setNewCompanyData] = useState<NewCompanyData>({
   name: "",
   csvUrl: "",
   csvUsername: "",
@@ -202,9 +202,7 @@ function useFormIds() {
  */
 function renderCompanyFormFields(
   newCompanyData: NewCompanyData,
-  setNewCompanyData: (
-    updater: (prev: NewCompanyData) => NewCompanyData
-  ) => void,
+  setNewCompanyData: React.Dispatch<React.SetStateAction<NewCompanyData>>,
   formIds: FormIds
 ) {
   return (
@@ -5,7 +5,7 @@ import { useEffect, useId, useState } from "react";
 interface DateRangePickerProps {
   minDate: string;
   maxDate: string;
-  onDateRangeChange: (_startDate: string, _endDate: string) => void;
+  onDateRangeChange: (startDate: string, endDate: string) => void;
   initialStartDate?: string;
   initialEndDate?: string;
 }
@@ -32,7 +32,7 @@ function renderMessageBubble(
   rehypePlugins={[rehypeRaw]}
   components={{
     p: "span",
-    a: ({ node: _node, ...props }) => (
+    a: ({ node, ...props }) => (
       <a
         className="text-sky-600 hover:text-sky-800 underline"
         {...props}
@@ -15,7 +15,7 @@ interface CSRFProtectedFormProps {
   children: ReactNode;
   action: string;
   method?: "POST" | "PUT" | "DELETE" | "PATCH";
-  onSubmit?: (_formData: FormData) => Promise<void> | void;
+  onSubmit?: (formData: FormData) => Promise<void> | void;
   className?: string;
   encType?: string;
 }
@@ -21,7 +21,7 @@ import {
 import { Button } from "@/components/ui/button";
 
 type Api = {
-  fire: (_options?: ConfettiOptions) => void;
+  fire: (options?: ConfettiOptions) => void;
 };
 
 type Props = React.ComponentPropsWithRef<"canvas"> & {
@@ -22,10 +22,10 @@ interface CSRFContextType {
   loading: boolean;
   error: string | null;
   refreshToken: () => Promise<void>;
-  addTokenToFetch: (_options: RequestInit) => RequestInit;
-  addTokenToFormData: (_formData: FormData) => FormData;
+  addTokenToFetch: (options: RequestInit) => RequestInit;
+  addTokenToFormData: (formData: FormData) => FormData;
   addTokenToObject: <T extends Record<string, unknown>>(
-    _obj: T
+    obj: T
   ) => T & { csrfToken: string };
 }
@@ -29,7 +29,7 @@ interface SecurityAlert {
 
 interface SecurityAlertsTableProps {
   alerts: SecurityAlert[];
-  onAcknowledge: (_alertId: string) => void;
+  onAcknowledge: (alertId: string) => void;
 }
 
 export function SecurityAlertsTable({
docs/scheduler-architecture.md (new file, 408 lines)
@@ -0,0 +1,408 @@
# Scheduler Architecture for Horizontal Scaling

This document describes the extracted scheduler architecture that enables horizontal scaling of background processing tasks.

## Architecture Overview

The scheduler system has been refactored from a monolithic approach to a service-oriented architecture that supports:

- **Individual Scheduler Services** - Each scheduler runs as a separate service
- **Horizontal Scaling** - Multiple instances of the same scheduler can run across different machines
- **Health Monitoring** - Built-in health checks for load balancers and orchestrators
- **Graceful Shutdown** - Proper handling of shutdown signals for zero-downtime deployments
- **Centralized Management** - Optional scheduler manager for coordinated operations

## Components

### 1. BaseSchedulerService

Abstract base class providing common functionality:

```typescript
export abstract class BaseSchedulerService extends EventEmitter {
  // Common scheduler functionality
  protected abstract executeTask(): Promise<void>;

  async start(): Promise<void>
  async stop(): Promise<void>
  pause(): void
  resume(): void
  getHealthStatus(): HealthStatus
  getMetrics(): SchedulerMetrics
}
```

**Features:**

- Status management (STOPPED, STARTING, RUNNING, PAUSED, ERROR)
- Metrics collection (run counts, timing, success/failure rates)
- Event emission for monitoring
- Configurable intervals and timeouts
- Automatic retry handling
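As a minimal sketch of what a concrete subclass looks like (the `HeartbeatSchedulerService` name is illustrative, not part of the codebase):

```typescript
// Illustrative subclass: the base class drives scheduling, retries, and
// metrics; a subclass only supplies the task body.
class HeartbeatSchedulerService extends BaseSchedulerService {
  protected async executeTask(): Promise<void> {
    // Work performed on every tick (trivial here by assumption).
    console.log(`[heartbeat] alive at ${new Date().toISOString()}`);
  }
}
```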
### 2. Individual Scheduler Services

#### CsvImportSchedulerService

Handles periodic CSV data import from companies:

```typescript
const csvScheduler = new CsvImportSchedulerService({
  interval: "*/10 * * * *", // Every 10 minutes
  batchSize: 10,
  maxConcurrentImports: 5,
  timeout: 300000, // 5 minutes
});
```

**Features:**

- Batch processing with configurable concurrency
- Duplicate detection
- Company-specific error handling
- Progress monitoring

#### Additional Schedulers (To Be Implemented)

- `ImportProcessingSchedulerService` - Process imported CSV data into sessions
- `SessionProcessingSchedulerService` - AI analysis and categorization
- `BatchProcessingSchedulerService` - OpenAI Batch API integration

### 3. SchedulerManager

Orchestrates multiple schedulers in a single process:

```typescript
const manager = new SchedulerManager();

manager.registerScheduler({
  id: "csv-import",
  name: "CSV Import Scheduler",
  service: new CsvImportSchedulerService(),
  autoStart: true,
  critical: true, // Auto-restart on failure
});

await manager.startAll();
```

**Features:**

- Automatic restart of failed critical schedulers
- Health monitoring across all schedulers
- Coordinated start/stop operations
- Event aggregation and logging
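A sketch of consuming the aggregated events (the event name and payload shape are assumptions; the exact emitter API is defined in `SchedulerManager`):

```typescript
// Hypothetical subscription to manager-level error events.
manager.on("schedulerError", ({ id, error }: { id: string; error: Error }) => {
  console.error(`[manager] scheduler ${id} failed:`, error);
});
```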
### 4. Standalone Scheduler Runner

Runs individual schedulers as separate processes:

```bash
# Run CSV import scheduler as standalone process
npx tsx lib/services/schedulers/StandaloneSchedulerRunner.ts --scheduler=csv-import

# List available schedulers
npx tsx lib/services/schedulers/StandaloneSchedulerRunner.ts --list
```

**Features:**

- Independent process execution
- Environment variable configuration
- Graceful shutdown handling
- Health reporting for monitoring

## Deployment Patterns

### 1. Single Process (Current Default)

All schedulers run within the main Next.js server process:

```typescript
// server.ts
import { initializeSchedulers } from './lib/services/schedulers/ServerSchedulerIntegration';

await initializeSchedulers();
```

**Pros:**

- Simple deployment
- Lower resource usage
- Easy local development

**Cons:**

- Limited scalability
- Single point of failure
- Resource contention

### 2. Separate Processes

Each scheduler runs as an independent process:

```bash
# Terminal 1: Main application
npm run dev

# Terminal 2: CSV Import Scheduler
npm run scheduler:csv-import

# Terminal 3: Session Processing Scheduler
npm run scheduler:session-processing
```

**Pros:**

- Independent scaling
- Fault isolation
- Resource optimization per scheduler

**Cons:**

- More complex deployment
- Higher resource overhead
- Inter-process coordination needed

### 3. Container Orchestration (Recommended for Production)

Each scheduler runs in separate containers managed by Kubernetes/Docker Swarm:

```yaml
# docker-compose.yml
version: '3.8'
services:
  app:
    build: .
    environment:
      - SCHEDULER_ENABLED=false # Disable in-process schedulers

  csv-import-scheduler:
    build: .
    command: npx tsx lib/services/schedulers/StandaloneSchedulerRunner.ts --scheduler=csv-import
    environment:
      - CSV_IMPORT_INTERVAL=*/10 * * * *
      - CSV_IMPORT_BATCH_SIZE=10

  session-processing-scheduler:
    build: .
    command: npx tsx lib/services/schedulers/StandaloneSchedulerRunner.ts --scheduler=session-processing
    environment:
      - SESSION_PROCESSING_INTERVAL=*/5 * * * *
```

**Pros:**

- Full horizontal scaling
- Independent resource allocation
- Health monitoring integration
- Zero-downtime deployments

**Cons:**

- Complex orchestration setup
- Network latency considerations
- Distributed system challenges

## Configuration

### Environment Variables

```bash
# Global Scheduler Settings
SCHEDULER_ENABLED=true
SCHEDULER_AUTO_RESTART=true

# CSV Import Scheduler
CSV_IMPORT_INTERVAL="*/10 * * * *"
CSV_IMPORT_BATCH_SIZE=10
CSV_IMPORT_MAX_CONCURRENT=5
CSV_IMPORT_TIMEOUT=300000

# Import Processing Scheduler
IMPORT_PROCESSING_INTERVAL="*/2 * * * *"
IMPORT_PROCESSING_TIMEOUT=120000

# Session Processing Scheduler
SESSION_PROCESSING_INTERVAL="*/5 * * * *"
SESSION_PROCESSING_BATCH_SIZE=50

# Batch Processing Scheduler
BATCH_PROCESSING_INTERVAL="*/5 * * * *"
BATCH_PROCESSING_CHECK_INTERVAL="*/2 * * * *"
```

### Package.json Scripts

```json
{
  "scripts": {
    "scheduler:csv-import": "tsx lib/services/schedulers/StandaloneSchedulerRunner.ts --scheduler=csv-import",
    "scheduler:import-processing": "tsx lib/services/schedulers/StandaloneSchedulerRunner.ts --scheduler=import-processing",
    "scheduler:session-processing": "tsx lib/services/schedulers/StandaloneSchedulerRunner.ts --scheduler=session-processing",
    "scheduler:batch-processing": "tsx lib/services/schedulers/StandaloneSchedulerRunner.ts --scheduler=batch-processing"
  }
}
```

## Health Monitoring

### Health Check Endpoints

```bash
# Overall scheduler health
GET /api/admin/schedulers/health

# Scheduler management
GET /api/admin/schedulers
POST /api/admin/schedulers
```

### Response Format

```json
{
  "healthy": true,
  "status": "healthy",
  "timestamp": "2024-01-15T10:30:00.000Z",
  "schedulers": {
    "total": 4,
    "running": 4,
    "errors": 0
  },
  "details": {
    "csv-import": {
      "status": "RUNNING",
      "healthy": true,
      "lastSuccess": "2024-01-15T10:25:00.000Z"
    }
  }
}
```
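A small consumer sketch for this payload (the host and port are illustrative):

```typescript
// Poll the health endpoint and surface degraded schedulers.
const res = await fetch("http://localhost:3000/api/admin/schedulers/health");
const health = await res.json();
if (!health.healthy) {
  console.warn(`schedulers degraded: ${health.schedulers.errors} in error state`);
}
```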
### Kubernetes Integration

```yaml
apiVersion: apps/v1
kind: Deployment
metadata:
  name: csv-import-scheduler
spec:
  template:
    spec:
      containers:
        - name: scheduler
          image: livedash:latest
          command: ["npx", "tsx", "lib/services/schedulers/StandaloneSchedulerRunner.ts", "--scheduler=csv-import"]
          livenessProbe:
            httpGet:
              path: /api/admin/schedulers/health
              port: 3000
            initialDelaySeconds: 30
            periodSeconds: 10
          readinessProbe:
            httpGet:
              path: /api/admin/schedulers/health
              port: 3000
            initialDelaySeconds: 5
            periodSeconds: 5
```

## Scaling Strategies

### 1. Vertical Scaling

Increase resources for scheduler processes:

```yaml
# docker-compose.yml
csv-import-scheduler:
  deploy:
    resources:
      limits:
        cpus: '2.0'
        memory: 2G
      reservations:
        cpus: '1.0'
        memory: 1G
```

### 2. Horizontal Scaling

Run multiple instances of the same scheduler:

```yaml
# Kubernetes
apiVersion: apps/v1
kind: Deployment
metadata:
  name: csv-import-scheduler
spec:
  replicas: 3 # Multiple instances
  template:
    spec:
      containers:
        - name: scheduler
          env:
            - name: SCHEDULER_INSTANCE_ID
              valueFrom:
                fieldRef:
                  fieldPath: metadata.name
```

**Note:** Ensure scheduler logic handles multiple instances correctly (e.g., using database locks or partitioning), as in the sketch below.
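One minimal coordination sketch, assuming a Postgres-backed Prisma client (the lock key is arbitrary but must be stable per scheduler; `runImportBatch` is a hypothetical task body):

```typescript
// Postgres advisory lock: only one instance executes the task at a time.
const [{ locked }] = await prisma.$queryRaw<[{ locked: boolean }]>`
  SELECT pg_try_advisory_lock(42) AS locked
`;
if (locked) {
  try {
    await runImportBatch(); // hypothetical per-tick work
  } finally {
    await prisma.$queryRaw`SELECT pg_advisory_unlock(42)`;
  }
}
```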
### 3. Geographic Distribution

Deploy schedulers across different regions:

```yaml
# Region-specific scheduling
csv-import-scheduler-us:
  environment:
    - REGION=us
    - CSV_COMPANIES_FILTER=region:us

csv-import-scheduler-eu:
  environment:
    - REGION=eu
    - CSV_COMPANIES_FILTER=region:eu
```

## Migration Guide

### From Current Architecture

1. **Phase 1: Extract Schedulers**
   - ✅ Create BaseSchedulerService
   - ✅ Implement CsvImportSchedulerService
   - ✅ Create SchedulerManager
   - ⏳ Implement remaining scheduler services

2. **Phase 2: Deployment Options**
   - ✅ Add ServerSchedulerIntegration for backwards compatibility
   - ✅ Create StandaloneSchedulerRunner
   - ✅ Add health check endpoints

3. **Phase 3: Container Support**
   - ⏳ Create Dockerfile for scheduler containers
   - ⏳ Add Kubernetes manifests
   - ⏳ Implement distributed coordination

4. **Phase 4: Production Migration**
   - ⏳ Deploy separate scheduler containers
   - ⏳ Monitor performance and stability
   - ⏳ Gradually increase horizontal scaling

### Breaking Changes

- Scheduler initialization moved from `server.ts` to `ServerSchedulerIntegration`
- Individual scheduler functions replaced with service classes
- Configuration moved to environment variables

## Benefits

1. **Scalability**: Independent scaling of different scheduler types
2. **Reliability**: Fault isolation prevents cascading failures
3. **Performance**: Optimized resource allocation per scheduler
4. **Monitoring**: Granular health checks and metrics
5. **Deployment**: Zero-downtime updates and rollbacks
6. **Development**: Easier testing and debugging of individual schedulers

## Next Steps

1. Implement remaining scheduler services (ImportProcessing, SessionProcessing, BatchProcessing)
2. Add distributed coordination for multi-instance schedulers
3. Create Kubernetes operators for automatic scaling
4. Implement scheduler-specific metrics and dashboards
5. Add scheduler performance optimization tools
@@ -119,11 +119,13 @@ export class AuditLogRetentionManager {
   };
 
   if (policy.severityFilter && policy.severityFilter.length > 0) {
-    whereClause.severity = { in: policy.severityFilter as any };
+    whereClause.severity = { in: policy.severityFilter as AuditSeverity[] };
   }
 
   if (policy.eventTypeFilter && policy.eventTypeFilter.length > 0) {
-    whereClause.eventType = { in: policy.eventTypeFilter as any };
+    whereClause.eventType = {
+      in: policy.eventTypeFilter as SecurityEventType[],
+    };
   }
 
   return whereClause;
@@ -1,4 +1,4 @@
-import cron from "node-cron";
+import * as cron from "node-cron";
 import { executeScheduledRetention } from "./auditLogRetention";
 import {
   AuditOutcome,
@@ -8,7 +8,7 @@ import {
 } from "./securityAuditLogger";
 
 export class AuditLogScheduler {
-  private retentionTask: any = null;
+  private retentionTask: cron.ScheduledTask | null = null;
   private isRunning = false;
 
   constructor() {
@@ -130,7 +130,7 @@ export class AuditLogScheduler {
 } {
   return {
     isRunning: this.isRunning,
-    nextExecution: this.retentionTask?.getStatus()?.next || undefined,
+    nextExecution: undefined, // node-cron doesn't provide next execution time
     schedule: process.env.AUDIT_LOG_RETENTION_SCHEDULE || "0 2 * * 0",
   };
 }
@@ -1,3 +1,4 @@
+import type { Company, User } from "@prisma/client";
 import bcrypt from "bcryptjs";
 import type { NextAuthOptions } from "next-auth";
 import CredentialsProvider from "next-auth/providers/credentials";
@@ -55,7 +56,7 @@ export const authOptions: NextAuthOptions = {
   email: { label: "Email", type: "email" },
   password: { label: "Password", type: "password" },
 },
-async authorize(credentials, _req) {
+async authorize(credentials) {
   if (!credentials?.email || !credentials?.password) {
     await enhancedSecurityLog(
       SecurityEventType.AUTHENTICATION,
@@ -79,7 +80,7 @@
 
 // Try to get user from cache first
 const cachedUser = await Cache.getUserByEmail(credentials.email);
-let fullUser: any = null;
+let fullUser: (User & { company: Company }) | null = null;
 
 if (cachedUser) {
   // Get full user data from database if cached user found
@@ -13,6 +13,7 @@ import {
   securityAuditLogger,
 } from "./securityAuditLogger";
 
+/* eslint-disable @typescript-eslint/no-unused-vars, no-unused-vars */
 export enum BatchLogLevel {
   DEBUG = "DEBUG",
   INFO = "INFO",
@@ -20,7 +21,9 @@ export enum BatchLogLevel {
   ERROR = "ERROR",
   CRITICAL = "CRITICAL",
 }
+/* eslint-enable @typescript-eslint/no-unused-vars, no-unused-vars */
 
+/* eslint-disable @typescript-eslint/no-unused-vars, no-unused-vars */
 export enum BatchOperation {
   BATCH_CREATION = "BATCH_CREATION",
   BATCH_STATUS_CHECK = "BATCH_STATUS_CHECK",
@@ -33,6 +36,7 @@ export enum BatchOperation {
   INDIVIDUAL_REQUEST_RETRY = "INDIVIDUAL_REQUEST_RETRY",
   COST_TRACKING = "COST_TRACKING",
 }
+/* eslint-enable @typescript-eslint/no-unused-vars, no-unused-vars */
 
 export interface BatchLogContext {
   operation: BatchOperation;
@@ -128,7 +128,7 @@ class CircuitBreaker {
 class BatchProcessingError extends Error {
   constructor(
     message: string,
-    public readonly _cause?: Error
+    public readonly cause?: Error
   ) {
     super(message);
     this.name = "BatchProcessingError";
@@ -145,7 +145,7 @@ class CircuitBreakerOpenError extends Error {
 class RetryableError extends Error {
   constructor(
     message: string,
-    public readonly _isRetryable = true
+    public readonly isRetryable = true
   ) {
     super(message);
     this.name = "RetryableError";
@@ -411,7 +411,6 @@ export async function getPendingBatchRequests(
     },
     processingStatus: AIRequestStatus.PENDING_BATCHING,
     batchId: null,
-    sessionId: { not: null },
   },
   include: {
     session: {
@@ -470,8 +469,6 @@ export async function createBatchRequest(
     );
   }
 
-  const _operationId = `batch-create-${crypto.randomUUID()}`;
-
   try {
     await batchLogger.log(
       BatchLogLevel.INFO,
@@ -1250,8 +1247,26 @@ export async function retryFailedRequests(
 for (const request of failedRequests) {
   try {
     await retryWithBackoff(async () => {
+      // Transform request to match processIndividualRequest interface
+      const transformedRequest = {
+        id: request.id,
+        model: request.model,
+        messages: [
+          {
+            role: "user",
+            content: formatMessagesForProcessing(
+              request.session?.messages || []
+            ),
+          },
+        ],
+        temperature: 0.1,
+        max_tokens: 1000,
+        processingType: request.processingType,
+        session: request.session,
+      };
+
       // Process individual request using regular OpenAI API
-      const result = await processIndividualRequest(request);
+      const result = await processIndividualRequest(transformedRequest);
       await updateProcessingRequestWithResult(request.id, result);
     }, `Retry individual request ${request.id}`);
@@ -226,14 +226,14 @@ export const IntegratedBatchProcessor = {
     ),
     pendingRequests: originalResult.pendingRequests,
     inProgressBatches:
-      (batchStats["IN_PROGRESS"] || 0) +
-      (batchStats["VALIDATING"] || 0) +
-      (batchStats["UPLOADING"] || 0) +
-      (batchStats["FINALIZING"] || 0),
+      (batchStats.IN_PROGRESS || 0) +
+      (batchStats.VALIDATING || 0) +
+      (batchStats.UPLOADING || 0) +
+      (batchStats.FINALIZING || 0),
     completedBatches:
-      (batchStats["COMPLETED"] || 0) + (batchStats["PROCESSED"] || 0),
+      (batchStats.COMPLETED || 0) + (batchStats.PROCESSED || 0),
     failedRequests:
-      (batchStats["FAILED"] || 0) + (batchStats["CANCELLED"] || 0),
+      (batchStats.FAILED || 0) + (batchStats.CANCELLED || 0),
   };
 },
 "getBatchProcessingStats"
@@ -137,7 +137,7 @@ export function stopOptimizedBatchScheduler(): void {
   { task: retryFailedTask, name: "retryFailedTask" },
 ];
 
-for (const { task, name: _name } of tasks) {
+for (const { task, name } of tasks) {
   if (task) {
     task.stop();
     task.destroy();
lib/constants.ts (new file, 148 lines)
@@ -0,0 +1,148 @@
/**
 * Application-wide constants and configuration values
 * Centralizes magic numbers and reduces code duplication
 */

// Time constants (in milliseconds)
export const TIME = {
  SECOND: 1000,
  MINUTE: 60 * 1000,
  HOUR: 60 * 60 * 1000,
  DAY: 24 * 60 * 60 * 1000,
  WEEK: 7 * 24 * 60 * 60 * 1000,
} as const;

// Database and query limits
export const DATABASE = {
  CONNECTION_POOL_SIZE: 15,
  QUERY_TIMEOUT: 30 * TIME.SECOND,
  MAX_RETRY_ATTEMPTS: 3,
  RETRY_DELAY_BASE: 1 * TIME.SECOND,
  RETRY_DELAY_MAX: 30 * TIME.SECOND,
} as const;

// Batch processing configuration
export const BATCH_PROCESSING = {
  MAX_REQUESTS_PER_BATCH: 1000,
  MIN_STATUS_CHECK_INTERVAL: 1 * TIME.MINUTE,
  MAX_BATCH_TIMEOUT: 24 * TIME.HOUR,
  REQUEST_TIMEOUT: 60 * TIME.SECOND,
  CIRCUIT_BREAKER_TIMEOUT: 5 * TIME.MINUTE,
  DEFAULT_BATCH_SIZE: 50,
} as const;

// AI Processing limits
export const AI_PROCESSING = {
  MAX_TOKENS: 1000,
  DEFAULT_TEMPERATURE: 0.7,
  MAX_RETRIES: 3,
  TIMEOUT: 60 * TIME.SECOND,
} as const;

// Rate limiting configuration
export const RATE_LIMITING = {
  AUTH_WINDOW: 15 * TIME.MINUTE,
  AUTH_MAX_ATTEMPTS: 5,
  REGISTRATION_WINDOW: 1 * TIME.HOUR,
  REGISTRATION_MAX_ATTEMPTS: 3,
  PASSWORD_RESET_WINDOW: 15 * TIME.MINUTE,
  PASSWORD_RESET_MAX_ATTEMPTS: 5,
  CSP_REPORT_WINDOW: 1 * TIME.MINUTE,
  CSP_REPORT_MAX_REPORTS: 10,
  DEFAULT_WINDOW: 1 * TIME.MINUTE,
  DEFAULT_MAX_REQUESTS: 100,
} as const;

// Security monitoring configuration
export const SECURITY_MONITORING = {
  EVENT_BUFFER_MAX_SIZE: 10000,
  EVENT_BUFFER_CLEANUP_INTERVAL: 5 * TIME.MINUTE,
  EVENT_RETENTION_HOURS: 1,
  ALERT_RETENTION_DAYS: 30,
  BACKGROUND_PROCESSING_INTERVAL: 30 * TIME.SECOND,
  THREAT_DETECTION_WINDOW: 1 * TIME.MINUTE,
  ANOMALY_CONFIDENCE_THRESHOLD: 0.8,
} as const;

// Cache configuration
export const CACHE = {
  DEFAULT_TTL: 5 * TIME.MINUTE,
  LONG_TTL: 1 * TIME.HOUR,
  SHORT_TTL: 30 * TIME.SECOND,
  MAX_SIZE: 10000,
  CLEANUP_INTERVAL: 5 * TIME.MINUTE,
} as const;

// File processing limits
export const FILE_PROCESSING = {
  MAX_FILE_SIZE: 100 * 1024 * 1024, // 100MB
  CHUNK_SIZE: 1024 * 1024, // 1MB
  MAX_CONCURRENT_UPLOADS: 5,
  UPLOAD_TIMEOUT: 10 * TIME.MINUTE,
} as const;

// Session configuration
export const SESSION = {
  PLATFORM_USER_DURATION: 8 * TIME.HOUR,
  COMPANY_USER_DURATION: 24 * TIME.HOUR,
  CLEANUP_INTERVAL: 1 * TIME.HOUR,
  MAX_CONCURRENT_SESSIONS: 5,
} as const;

// Scheduler configuration
export const SCHEDULER = {
  CSV_IMPORT_INTERVAL: 5 * TIME.MINUTE,
  PROCESSING_INTERVAL: 2 * TIME.MINUTE,
  BATCH_CREATION_INTERVAL: 5 * TIME.MINUTE,
  BATCH_STATUS_CHECK_INTERVAL: 2 * TIME.MINUTE,
  AUDIT_LOG_CLEANUP_INTERVAL: 24 * TIME.HOUR,
  DEFAULT_CONCURRENCY: 5,
  MAX_PROCESSING_TIME: 30 * TIME.MINUTE,
} as const;

// API configuration
export const API = {
  DEFAULT_TIMEOUT: 30 * TIME.SECOND,
  LONG_TIMEOUT: 2 * TIME.MINUTE,
  MAX_RETRIES: 3,
  RETRY_DELAY: 1 * TIME.SECOND,
  MAX_RESPONSE_SIZE: 10 * 1024 * 1024, // 10MB
} as const;

// Validation limits
export const VALIDATION = {
  MIN_PASSWORD_LENGTH: 12,
  MAX_PASSWORD_LENGTH: 128,
  MAX_EMAIL_LENGTH: 254,
  MAX_NAME_LENGTH: 100,
  MAX_DESCRIPTION_LENGTH: 1000,
  MAX_SEARCH_QUERY_LENGTH: 500,
} as const;

// Environment-specific configurations
export const ENVIRONMENT = {
  DEVELOPMENT: {
    LOG_LEVEL: "debug",
    MOCK_EXTERNAL_APIS: true,
    STRICT_CSP: false,
  },
  PRODUCTION: {
    LOG_LEVEL: "warn",
    MOCK_EXTERNAL_APIS: false,
    STRICT_CSP: true,
  },
  TEST: {
    LOG_LEVEL: "error",
    MOCK_EXTERNAL_APIS: true,
    STRICT_CSP: false,
  },
} as const;

// Feature flags
export const FEATURES = {
  ENABLE_BATCH_OPTIMIZATION: true,
  ENABLE_SECURITY_MONITORING: true,
  ENABLE_PERFORMANCE_TRACKING: true,
  ENABLE_CACHE: true,
  ENABLE_RATE_LIMITING: true,
} as const;
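As a usage sketch, the retry constants compose naturally into a capped exponential backoff (the helper itself is illustrative, not part of `lib/constants.ts`):

```typescript
import { DATABASE } from "./constants";

// Delay for the nth retry: base * 2^n, capped at the configured maximum.
function retryDelay(attempt: number): number {
  return Math.min(DATABASE.RETRY_DELAY_BASE * 2 ** attempt, DATABASE.RETRY_DELAY_MAX);
}
```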
@@ -14,7 +14,7 @@ export function generateNonce(): string {
   if (typeof crypto !== "undefined" && crypto.getRandomValues) {
     const bytes = new Uint8Array(16);
     crypto.getRandomValues(bytes);
-    return btoa(String.fromCharCode(...bytes));
+    return btoa(String.fromCharCode.apply(null, Array.from(bytes)));
   }
 
   throw new Error(
@@ -30,10 +30,8 @@ export function buildCSP(config: CSPConfig = {}): string {
   nonce,
   isDevelopment = false,
   reportUri,
-  _enforceMode = true,
   strictMode = false,
   allowedExternalDomains = [],
-  _reportingLevel = "violations",
 } = config;
 
 // Base directives for all environments
@@ -389,7 +389,12 @@ export function testCSPImplementation(csp: string): {
   }>;
   overallScore: number;
 } {
-  const testResults = [];
+  const testResults: Array<{
+    name: string;
+    passed: boolean;
+    description: string;
+    recommendation?: string;
+  }> = [];
 
   // Test 1: Script injection protection
   testResults.push({
@@ -32,19 +32,14 @@ function createDynamicComponent<T = object>(
   options?: {
     loading?: ComponentType;
     ssr?: boolean;
-    suspense?: boolean;
   }
 ) {
-  const {
-    loading: LoadingComponent = LoadingSpinner,
-    ssr = true,
-    suspense = false,
-  } = options || {};
+  const { loading: LoadingComponent = LoadingSpinner, ssr = true } =
+    options || {};
 
   return dynamic(importFunc, {
     loading: () => <LoadingComponent />,
     ssr,
-    suspense,
   });
 }
@@ -70,30 +65,33 @@ export const DynamicAreaChart = createDynamicComponent(
 );
 
 // D3 components for data visualization (also heavy)
-export const DynamicWordCloud = createDynamicComponent(
-  () =>
-    import("../components/charts/WordCloud").then((mod) => ({
-      default: mod.WordCloud,
-    })),
-  { loading: LoadingSkeleton, ssr: false }
-);
+// TODO: Create WordCloud component
+// export const DynamicWordCloud = createDynamicComponent(
+//   () =>
+//     import("../components/charts/WordCloud").then((mod) => ({
+//       default: mod.WordCloud,
+//     })),
+//   { loading: LoadingSkeleton, ssr: false }
+// );
 
-export const DynamicTreeMap = createDynamicComponent(
-  () =>
-    import("../components/charts/TreeMap").then((mod) => ({
-      default: mod.TreeMap,
-    })),
-  { loading: LoadingSkeleton, ssr: false }
-);
+// TODO: Create TreeMap component
+// export const DynamicTreeMap = createDynamicComponent(
+//   () =>
+//     import("../components/charts/TreeMap").then((mod) => ({
+//       default: mod.TreeMap,
+//     })),
+//   { loading: LoadingSkeleton, ssr: false }
+// );
 
 // Map components (Leaflet is heavy)
-export const DynamicLeafletMap = createDynamicComponent(
-  () =>
-    import("../components/maps/LeafletMap").then((mod) => ({
-      default: mod.LeafletMap,
-    })),
-  { loading: LoadingSkeleton, ssr: false }
-);
+// TODO: Create LeafletMap component
+// export const DynamicLeafletMap = createDynamicComponent(
+//   () =>
+//     import("../components/maps/LeafletMap").then((mod) => ({
+//       default: mod.LeafletMap,
+//     })),
+//   { loading: LoadingSkeleton, ssr: false }
+// );
 
 // Admin panels (only loaded for admin users)
 export const DynamicAuditLogsPanel = createDynamicComponent(
@@ -104,95 +102,107 @@ export const DynamicAuditLogsPanel = createDynamicComponent(
   { loading: LoadingSkeleton }
 );
 
-export const DynamicSecurityMonitoring = createDynamicComponent(
-  () =>
-    import("../components/admin/SecurityMonitoring").then((mod) => ({
-      default: mod.SecurityMonitoring,
-    })),
-  { loading: LoadingSkeleton }
-);
+// TODO: Create SecurityMonitoring component
+// export const DynamicSecurityMonitoring = createDynamicComponent(
+//   () =>
+//     import("../components/admin/SecurityMonitoring").then((mod) => ({
+//       default: mod.SecurityMonitoring,
+//     })),
+//   { loading: LoadingSkeleton }
+// );
 
 // CSV processing components (only loaded when needed)
-export const DynamicCSVUploader = createDynamicComponent(
-  () =>
-    import("../components/csv/CSVUploader").then((mod) => ({
-      default: mod.CSVUploader,
-    })),
-  { loading: LoadingSpinner }
-);
+// TODO: Create CSVUploader component
+// export const DynamicCSVUploader = createDynamicComponent(
+//   () =>
+//     import("../components/csv/CSVUploader").then((mod) => ({
+//       default: mod.CSVUploader,
+//     })),
+//   { loading: LoadingSpinner }
+// );
 
-export const DynamicCSVProcessor = createDynamicComponent(
-  () =>
-    import("../components/csv/CSVProcessor").then((mod) => ({
-      default: mod.CSVProcessor,
-    })),
-  { loading: LoadingSpinner }
-);
+// TODO: Create CSVProcessor component
+// export const DynamicCSVProcessor = createDynamicComponent(
+//   () =>
+//     import("../components/csv/CSVProcessor").then((mod) => ({
+//       default: mod.CSVProcessor,
+//     })),
+//   { loading: LoadingSpinner }
+// );
 
 // Data table components (heavy when dealing with large datasets)
-export const DynamicDataTable = createDynamicComponent(
-  () =>
-    import("../components/tables/DataTable").then((mod) => ({
-      default: mod.DataTable,
-    })),
-  { loading: LoadingSkeleton }
-);
+// TODO: Create DataTable component
+// export const DynamicDataTable = createDynamicComponent(
+//   () =>
+//     import("../components/tables/DataTable").then((mod) => ({
+//       default: mod.DataTable,
+//     })),
+//   { loading: LoadingSkeleton }
+// );
 
 // Modal components (can be heavy with complex forms)
-export const DynamicUserInviteModal = createDynamicComponent(
-  () =>
-    import("../components/modals/UserInviteModal").then((mod) => ({
-      default: mod.UserInviteModal,
-    })),
-  { loading: LoadingSpinner }
-);
+// TODO: Create UserInviteModal component
+// export const DynamicUserInviteModal = createDynamicComponent(
+//   () =>
+//     import("../components/modals/UserInviteModal").then((mod) => ({
+//       default: mod.UserInviteModal,
+//     })),
+//   { loading: LoadingSpinner }
+// );
 
-export const DynamicCompanySettingsModal = createDynamicComponent(
-  () =>
-    import("../components/modals/CompanySettingsModal").then((mod) => ({
-      default: mod.CompanySettingsModal,
-    })),
-  { loading: LoadingSpinner }
-);
+// TODO: Create CompanySettingsModal component
+// export const DynamicCompanySettingsModal = createDynamicComponent(
+//   () =>
+//     import("../components/modals/CompanySettingsModal").then((mod) => ({
+//       default: mod.CompanySettingsModal,
+//     })),
+//   { loading: LoadingSpinner }
+// );
 
 // Text editor components (rich text editors are typically heavy)
-export const DynamicRichTextEditor = createDynamicComponent(
-  () =>
-    import("../components/editor/RichTextEditor").then((mod) => ({
-      default: mod.RichTextEditor,
-    })),
-  { loading: LoadingSpinner, ssr: false }
-);
+// TODO: Create RichTextEditor component
+// export const DynamicRichTextEditor = createDynamicComponent(
+//   () =>
+//     import("../components/editor/RichTextEditor").then((mod) => ({
+//       default: mod.RichTextEditor,
+//     })),
+//   { loading: LoadingSpinner, ssr: false }
+// );
 
 // PDF viewers and generators (heavy libraries)
-export const DynamicPDFViewer = createDynamicComponent(
-  () =>
-    import("../components/pdf/PDFViewer").then((mod) => ({
-      default: mod.PDFViewer,
-    })),
-  { loading: LoadingSpinner, ssr: false }
-);
+// TODO: Create PDFViewer component
+// export const DynamicPDFViewer = createDynamicComponent(
+//   () =>
+//     import("../components/pdf/PDFViewer").then((mod) => ({
+//       default: mod.PDFViewer,
+//     })),
+//   { loading: LoadingSpinner, ssr: false }
+// );
 
 // Animation libraries (Framer Motion, Lottie, etc.)
-export const DynamicAnimatedComponent = createDynamicComponent(
-  () =>
-    import("../components/animations/AnimatedComponent").then((mod) => ({
-      default: mod.AnimatedComponent,
-    })),
-  { loading: LoadingSpinner, ssr: false }
-);
+// TODO: Create AnimatedComponent
+// export const DynamicAnimatedComponent = createDynamicComponent(
+//   () =>
+//     import("../components/animations/AnimatedComponent").then((mod) => ({
+//       default: mod.AnimatedComponent,
+//     })),
+//   { loading: LoadingSpinner, ssr: false }
+// );
 
 // React wrapper for React.lazy with Suspense
-export function createLazyComponent<T = object>(
+export function createLazyComponent<
+  T extends Record<string, any> = Record<string, any>,
+>(
   importFunc: () => Promise<{ default: ComponentType<T> }>,
-  _fallback: ComponentType = LoadingSpinner
+  fallback: ComponentType = LoadingSpinner
 ) {
   const LazyComponent = lazy(importFunc);
+  const FallbackComponent = fallback;
 
   return function WrappedComponent(props: T) {
     return (
-      <Suspense fallback={<fallback />}>
-        <LazyComponent {...props} />
+      <Suspense fallback={<FallbackComponent />}>
+        <LazyComponent {...(props as T)} />
       </Suspense>
     );
   };
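A usage sketch for the fixed wrapper (the `SettingsPanel` module is illustrative):

```typescript
// The fallback now renders correctly because it is aliased to an
// uppercase component name before being used in JSX.
const LazySettingsPanel = createLazyComponent(
  () => import("../components/SettingsPanel"), // hypothetical module
  LoadingSkeleton
);
// <LazySettingsPanel /> renders LoadingSkeleton until the chunk loads.
```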
@@ -1,5 +1,5 @@
 // SessionImport to Session processor
-import { ProcessingStage, SentimentCategory } from "@prisma/client";
+import { ProcessingStage } from "@prisma/client";
 import cron from "node-cron";
 import { withRetry } from "./database-retry";
 import { getSchedulerConfig } from "./env";
@@ -77,34 +77,6 @@ function parseEuropeanDate(dateStr: string): Date {
   return date;
 }
 
-/**
- * Helper function to parse sentiment from raw string (fallback only)
- */
-function _parseFallbackSentiment(
-  sentimentRaw: string | null
-): SentimentCategory | null {
-  if (!sentimentRaw) return null;
-
-  const sentimentStr = sentimentRaw.toLowerCase();
-  if (sentimentStr.includes("positive")) {
-    return SentimentCategory.POSITIVE;
-  }
-  if (sentimentStr.includes("negative")) {
-    return SentimentCategory.NEGATIVE;
-  }
-  return SentimentCategory.NEUTRAL;
-}
-
-/**
- * Helper function to parse boolean from raw string (fallback only)
- */
-function _parseFallbackBoolean(rawValue: string | null): boolean | null {
-  if (!rawValue) return null;
-  return ["true", "1", "yes", "escalated", "forwarded"].includes(
-    rawValue.toLowerCase()
-  );
-}
-
 /**
  * Parse transcript content into Message records
  */
@@ -275,7 +275,6 @@ class OpenAIMockServer {
   custom_id: `req-${i}`,
   response: {
     status_code: 200,
-    request_id: `req-${Date.now()}-${i}`,
     body: response,
   },
 });
@@ -375,11 +374,13 @@ export class MockOpenAIClient {
 get chat() {
   return {
     completions: {
-      create: async (params: unknown) => {
+      create: async (params: any) => {
         if (openAIMock.isEnabled()) {
-          return openAIMock.mockChatCompletion(params);
+          return openAIMock.mockChatCompletion(params as any);
         }
-        return this.realClient.chat.completions.create(params);
+        return (this.realClient as any).chat.completions.create(
+          params as any
+        );
       },
     },
   };
@@ -387,34 +388,34 @@
 
 get batches() {
   return {
-    create: async (params: unknown) => {
+    create: async (params: any) => {
       if (openAIMock.isEnabled()) {
-        return openAIMock.mockCreateBatch(params);
+        return openAIMock.mockCreateBatch(params as any);
       }
-      return this.realClient.batches.create(params);
+      return (this.realClient as any).batches.create(params as any);
     },
     retrieve: async (batchId: string) => {
       if (openAIMock.isEnabled()) {
        return openAIMock.mockGetBatch(batchId);
       }
-      return this.realClient.batches.retrieve(batchId);
+      return (this.realClient as any).batches.retrieve(batchId);
     },
   };
 }
 
 get files() {
   return {
-    create: async (params: unknown) => {
+    create: async (params: any) => {
       if (openAIMock.isEnabled()) {
         return openAIMock.mockUploadFile(params);
       }
-      return this.realClient.files.create(params);
+      return (this.realClient as any).files.create(params);
     },
     content: async (fileId: string) => {
       if (openAIMock.isEnabled()) {
         return openAIMock.mockGetFileContent(fileId);
       }
-      return this.realClient.files.content(fileId);
+      return (this.realClient as any).files.content(fileId);
     },
   };
 }
@@ -53,9 +53,9 @@ class PerformanceMonitor {
   });
 
   // Monitor CLS (Cumulative Layout Shift)
-  this.observeMetric("layout-shift", (entries) => {
+  this.observeMetric("layout-shift", (list) => {
     let clsValue = 0;
-    for (const entry of entries) {
+    for (const entry of list) {
       const entryWithValue = entry as PerformanceEntry & {
         value: number;
         hadRecentInput: boolean;
@@ -180,8 +180,8 @@ class PerformanceMonitor {
 private sendToAnalytics(metricName: string, value: number) {
   // Placeholder for analytics integration
   // You could send this to Google Analytics, Vercel Analytics, etc.
-  if (typeof gtag !== "undefined") {
-    gtag("event", "core_web_vital", {
+  if (typeof window !== "undefined" && "gtag" in window) {
+    (window as any).gtag("event", "core_web_vital", {
       name: metricName,
       value: Math.round(value),
       metric_rating: this.getRating(metricName, value),
@@ -339,11 +339,15 @@ export const ResourceOptimizer = {
    const scripts = Array.from(document.querySelectorAll("script[src]"));
    const styles = Array.from(document.querySelectorAll("link[href]"));

    return [...scripts, ...styles].some(
      (element) =>
        (element as HTMLScriptElement | HTMLLinkElement).src === url ||
        (element as HTMLLinkElement).href === url
    );
    return [...scripts, ...styles].some((element) => {
      if (element.tagName === "SCRIPT") {
        return (element as HTMLScriptElement).src === url;
      }
      if (element.tagName === "LINK") {
        return (element as HTMLLinkElement).href === url;
      }
      return false;
    });
  },
};
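The rewritten check narrows by tagName before casting, so a link element is never asked for src and vice versa. An equivalent, cast-free variant uses instanceof narrowing; a minimal sketch:

function matchesUrl(element: Element, url: string): boolean {
  // instanceof narrows the type, so .src/.href are typed without casts.
  if (element instanceof HTMLScriptElement) return element.src === url;
  if (element instanceof HTMLLinkElement) return element.href === url;
  return false;
}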

@@ -8,7 +8,7 @@ import { env } from "./env";

// Add prisma to the NodeJS global type
declare const global: {
  prisma: PrismaClient | undefined;
  prisma: InstanceType<typeof PrismaClient> | undefined;
};

// Connection pooling configuration
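The InstanceType<typeof PrismaClient> change keeps the global cache typed against the generated client. For context, a minimal sketch of the dev-time singleton this global usually supports (the NODE_ENV guard is an assumption about the rest of the file):

import { PrismaClient } from "@prisma/client";

// Reuse one client across Next.js hot reloads instead of leaking connections.
export const prisma = global.prisma ?? new PrismaClient();

if (process.env.NODE_ENV !== "production") {
  global.prisma = prisma;
}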

lib/repositories/BaseRepository.ts (new file, 71 lines)
@@ -0,0 +1,71 @@
/**
 * Base repository interface with common CRUD operations
 */
export interface BaseRepository<T, ID = string> {
  findById(id: ID): Promise<T | null>;
  findMany(options?: FindManyOptions<T>): Promise<T[]>;
  create(data: CreateInput<T>): Promise<T>;
  update(id: ID, data: UpdateInput<T>): Promise<T | null>;
  delete(id: ID): Promise<boolean>;
  count(options?: CountOptions<T>): Promise<number>;
}

/**
 * Generic find options interface
 */
export interface FindManyOptions<T> {
  where?: Partial<T>;
  orderBy?: Record<keyof T, "asc" | "desc">;
  skip?: number;
  take?: number;
  include?: Record<string, boolean>;
}

/**
 * Generic count options interface
 */
export interface CountOptions<T> {
  where?: Partial<T>;
}

/**
 * Create input type - excludes auto-generated fields
 */
export type CreateInput<T> = Omit<T, "id" | "createdAt" | "updatedAt">;

/**
 * Update input type - excludes auto-generated fields and makes all optional
 */
export type UpdateInput<T> = Partial<Omit<T, "id" | "createdAt" | "updatedAt">>;

/**
 * Repository error types
 */
export class RepositoryError extends Error {
  constructor(
    message: string,
    public readonly code: string,
    public readonly cause?: Error
  ) {
    super(message);
    this.name = "RepositoryError";
  }
}

export class NotFoundError extends RepositoryError {
  constructor(entity: string, id: string | number) {
    super(`${entity} with id ${id} not found`, "NOT_FOUND");
  }
}

export class ConflictError extends RepositoryError {
  constructor(message: string, cause?: Error) {
    super(message, "CONFLICT", cause);
  }
}

export class ValidationError extends RepositoryError {
  constructor(message: string, cause?: Error) {
    super(message, "VALIDATION_ERROR", cause);
  }
}
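A small consumer-side sketch of the error hierarchy above, treating NOT_FOUND as an expected miss and everything else as a failure (the helper name is illustrative):

import { NotFoundError, RepositoryError } from "./BaseRepository";

async function loadOrNull<T>(op: () => Promise<T>): Promise<T | null> {
  try {
    return await op();
  } catch (error) {
    if (error instanceof NotFoundError) return null; // expected miss
    if (error instanceof RepositoryError) {
      console.error(`repository failure [${error.code}]`, error.cause);
    }
    throw error;
  }
}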

lib/repositories/RepositoryFactory.ts (new file, 92 lines)
@@ -0,0 +1,92 @@
import { SecurityAuditLogRepository } from "./SecurityAuditLogRepository";
import { SessionRepository } from "./SessionRepository";
import { UserRepository } from "./UserRepository";

/**
 * Repository factory for centralized repository management
 * Implements singleton pattern to ensure single instances
 */
export class RepositoryFactory {
  private static instance: RepositoryFactory;

  private sessionRepository?: SessionRepository;
  private userRepository?: UserRepository;
  private securityAuditLogRepository?: SecurityAuditLogRepository;

  private constructor() {
    // Private constructor for singleton
  }

  /**
   * Get the singleton instance of RepositoryFactory
   */
  static getInstance(): RepositoryFactory {
    if (!RepositoryFactory.instance) {
      RepositoryFactory.instance = new RepositoryFactory();
    }
    return RepositoryFactory.instance;
  }

  /**
   * Get SessionRepository instance
   */
  getSessionRepository(): SessionRepository {
    if (!this.sessionRepository) {
      this.sessionRepository = new SessionRepository();
    }
    return this.sessionRepository;
  }

  /**
   * Get UserRepository instance
   */
  getUserRepository(): UserRepository {
    if (!this.userRepository) {
      this.userRepository = new UserRepository();
    }
    return this.userRepository;
  }

  /**
   * Get SecurityAuditLogRepository instance
   */
  getSecurityAuditLogRepository(): SecurityAuditLogRepository {
    if (!this.securityAuditLogRepository) {
      this.securityAuditLogRepository = new SecurityAuditLogRepository();
    }
    return this.securityAuditLogRepository;
  }

  /**
   * Get all repository instances
   */
  getAllRepositories() {
    return {
      sessions: this.getSessionRepository(),
      users: this.getUserRepository(),
      securityAuditLogs: this.getSecurityAuditLogRepository(),
    };
  }

  /**
   * Reset all repository instances (useful for testing)
   */
  reset(): void {
    this.sessionRepository = undefined;
    this.userRepository = undefined;
    this.securityAuditLogRepository = undefined;
  }
}

/**
 * Convenience function to get repository factory instance
 */
export const repositories = RepositoryFactory.getInstance();

/**
 * Convenience functions to get specific repositories
 */
export const getSessionRepository = () => repositories.getSessionRepository();
export const getUserRepository = () => repositories.getUserRepository();
export const getSecurityAuditLogRepository = () =>
  repositories.getSecurityAuditLogRepository();
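Usage is then a one-liner per call site; a short sketch (the wrapper function and company id are illustrative):

import { getUserRepository, repositories } from "@/lib/repositories/RepositoryFactory";

async function listCompanyUsers(companyId: string) {
  return getUserRepository().findByCompanyId(companyId);
}

// In tests, drop the cached instances so each case starts clean.
repositories.reset();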

lib/repositories/SecurityAuditLogRepository.ts (new file, 476 lines)
@@ -0,0 +1,476 @@
import type { Prisma, SecurityAuditLog } from "@prisma/client";
import { prisma } from "../prisma";
import {
  AuditOutcome,
  type AuditSeverity,
  SecurityEventType,
} from "../securityAuditLogger";
import {
  type BaseRepository,
  type CountOptions,
  type CreateInput,
  type FindManyOptions,
  RepositoryError,
  type UpdateInput,
} from "./BaseRepository";

/**
 * Security audit log with included relations
 */
export type SecurityAuditLogWithRelations = SecurityAuditLog & {
  user?: {
    id: string;
    email: string;
  };
  company?: {
    id: string;
    name: string;
  };
};

/**
 * Security audit analytics interface
 */
export interface SecurityAnalytics {
  totalEvents: number;
  eventsByType: Record<SecurityEventType, number>;
  eventsBySeverity: Record<AuditSeverity, number>;
  eventsByOutcome: Record<AuditOutcome, number>;
  topIPs: Array<{ ip: string; count: number }>;
  topUsers: Array<{ userId: string; email: string; count: number }>;
  hourlyDistribution: Array<{ hour: number; count: number }>;
  geoDistribution: Record<string, number>;
}

/**
 * SecurityAuditLog repository implementing database operations
 */
export class SecurityAuditLogRepository
  implements BaseRepository<SecurityAuditLog>
{
  /**
   * Find audit log by ID
   */
  async findById(id: string): Promise<SecurityAuditLogWithRelations | null> {
    try {
      return await prisma.securityAuditLog.findUnique({
        where: { id },
        include: {
          user: {
            select: { id: true, email: true },
          },
          company: {
            select: { id: true, name: true },
          },
        },
      });
    } catch (error) {
      throw new RepositoryError(
        `Failed to find audit log ${id}`,
        "FIND_ERROR",
        error as Error
      );
    }
  }

  /**
   * Find many audit logs with filters
   */
  async findMany(
    options?: FindManyOptions<SecurityAuditLog>
  ): Promise<SecurityAuditLogWithRelations[]> {
    try {
      return await prisma.securityAuditLog.findMany({
        where: options?.where as Prisma.SecurityAuditLogWhereInput,
        orderBy:
          options?.orderBy as Prisma.SecurityAuditLogOrderByWithRelationInput,
        skip: options?.skip,
        take: options?.take,
        include: {
          user: {
            select: { id: true, email: true },
          },
          company: {
            select: { id: true, name: true },
          },
        },
      });
    } catch (error) {
      throw new RepositoryError(
        "Failed to find audit logs",
        "FIND_ERROR",
        error as Error
      );
    }
  }

  /**
   * Find audit logs by event type
   */
  async findByEventType(
    eventType: SecurityEventType,
    limit = 100
  ): Promise<SecurityAuditLog[]> {
    try {
      return await prisma.securityAuditLog.findMany({
        where: { eventType },
        orderBy: { timestamp: "desc" },
        take: limit,
      });
    } catch (error) {
      throw new RepositoryError(
        `Failed to find audit logs by event type ${eventType}`,
        "FIND_ERROR",
        error as Error
      );
    }
  }

  /**
   * Find audit logs by IP address within time range
   */
  async findByIPAddress(
    ipAddress: string,
    startTime: Date,
    endTime?: Date
  ): Promise<SecurityAuditLog[]> {
    try {
      return await prisma.securityAuditLog.findMany({
        where: {
          ipAddress,
          timestamp: {
            gte: startTime,
            ...(endTime && { lte: endTime }),
          },
        },
        orderBy: { timestamp: "desc" },
      });
    } catch (error) {
      throw new RepositoryError(
        `Failed to find audit logs by IP ${ipAddress}`,
        "FIND_ERROR",
        error as Error
      );
    }
  }

  /**
   * Find failed authentication attempts
   */
  async findFailedAuthAttempts(
    ipAddress?: string,
    timeWindow = 24 * 60 * 60 * 1000 // 24 hours in ms
  ): Promise<SecurityAuditLog[]> {
    try {
      const startTime = new Date(Date.now() - timeWindow);
      return await prisma.securityAuditLog.findMany({
        where: {
          eventType: SecurityEventType.AUTHENTICATION,
          outcome: AuditOutcome.FAILURE,
          timestamp: { gte: startTime },
          ...(ipAddress && { ipAddress }),
        },
        orderBy: { timestamp: "desc" },
      });
    } catch (error) {
      throw new RepositoryError(
        "Failed to find failed authentication attempts",
        "FIND_ERROR",
        error as Error
      );
    }
  }

  /**
   * Create audit log entry
   */
  async create(data: CreateInput<SecurityAuditLog>): Promise<SecurityAuditLog> {
    try {
      return await prisma.securityAuditLog.create({
        data: data as Prisma.SecurityAuditLogCreateInput,
      });
    } catch (error) {
      throw new RepositoryError(
        "Failed to create audit log",
        "CREATE_ERROR",
        error as Error
      );
    }
  }

  /**
   * Update audit log (rarely used, mainly for corrections)
   */
  async update(
    id: string,
    data: UpdateInput<SecurityAuditLog>
  ): Promise<SecurityAuditLog | null> {
    try {
      return await prisma.securityAuditLog.update({
        where: { id },
        data: data as Prisma.SecurityAuditLogUpdateInput,
      });
    } catch (error) {
      throw new RepositoryError(
        `Failed to update audit log ${id}`,
        "UPDATE_ERROR",
        error as Error
      );
    }
  }

  /**
   * Delete audit log (used for cleanup)
   */
  async delete(id: string): Promise<boolean> {
    try {
      await prisma.securityAuditLog.delete({ where: { id } });
      return true;
    } catch (error) {
      throw new RepositoryError(
        `Failed to delete audit log ${id}`,
        "DELETE_ERROR",
        error as Error
      );
    }
  }

  /**
   * Count audit logs with filters
   */
  async count(options?: CountOptions<SecurityAuditLog>): Promise<number> {
    try {
      return await prisma.securityAuditLog.count({
        where: options?.where as Prisma.SecurityAuditLogWhereInput,
      });
    } catch (error) {
      throw new RepositoryError(
        "Failed to count audit logs",
        "COUNT_ERROR",
        error as Error
      );
    }
  }

  /**
   * Get security analytics for dashboard
   */
  async getSecurityAnalytics(
    startDate: Date,
    endDate: Date,
    companyId?: string
  ): Promise<SecurityAnalytics> {
    try {
      const whereClause = {
        timestamp: {
          gte: startDate,
          lte: endDate,
        },
        ...(companyId && { companyId }),
      };

      const [events, eventsByType, eventsBySeverity, eventsByOutcome] =
        await Promise.all([
          prisma.securityAuditLog.findMany({
            where: whereClause,
            include: {
              user: { select: { id: true, email: true } },
            },
          }),
          prisma.securityAuditLog.groupBy({
            by: ["eventType"],
            where: whereClause,
            _count: { eventType: true },
          }),
          prisma.securityAuditLog.groupBy({
            by: ["severity"],
            where: whereClause,
            _count: { severity: true },
          }),
          prisma.securityAuditLog.groupBy({
            by: ["outcome"],
            where: whereClause,
            _count: { outcome: true },
          }),
        ]);

      // Process aggregated data
      const totalEvents = events.length;

      const eventsByTypeMap = eventsByType.reduce(
        (acc, item) => {
          acc[item.eventType as SecurityEventType] = item._count.eventType;
          return acc;
        },
        {} as Record<SecurityEventType, number>
      );

      const eventsBySeverityMap = eventsBySeverity.reduce(
        (acc, item) => {
          acc[item.severity as AuditSeverity] = item._count.severity;
          return acc;
        },
        {} as Record<AuditSeverity, number>
      );

      const eventsByOutcomeMap = eventsByOutcome.reduce(
        (acc, item) => {
          acc[item.outcome as AuditOutcome] = item._count.outcome;
          return acc;
        },
        {} as Record<AuditOutcome, number>
      );

      // Top IPs
      const ipCounts = events.reduce(
        (acc, event) => {
          if (event.ipAddress) {
            acc[event.ipAddress] = (acc[event.ipAddress] || 0) + 1;
          }
          return acc;
        },
        {} as Record<string, number>
      );

      const topIPs = Object.entries(ipCounts)
        .map(([ip, count]) => ({ ip, count }))
        .sort((a, b) => b.count - a.count)
        .slice(0, 10);

      // Top users
      const userCounts = events
        .filter((e) => e.userId && e.user)
        .reduce(
          (acc, event) => {
            const key = event.userId!;
            if (!acc[key]) {
              acc[key] = {
                userId: event.userId!,
                email: event.user?.email,
                count: 0,
              };
            }
            acc[key].count++;
            return acc;
          },
          {} as Record<string, { userId: string; email: string; count: number }>
        );

      const topUsers = Object.values(userCounts)
        .sort((a, b) => b.count - a.count)
        .slice(0, 10);

      // Hourly distribution
      const hourlyDistribution = Array.from({ length: 24 }, (_, hour) => ({
        hour,
        count: events.filter((e) => e.timestamp.getHours() === hour).length,
      }));

      // Geographic distribution
      const geoDistribution = events.reduce(
        (acc, event) => {
          if (event.country) {
            acc[event.country] = (acc[event.country] || 0) + 1;
          }
          return acc;
        },
        {} as Record<string, number>
      );

      return {
        totalEvents,
        eventsByType: eventsByTypeMap,
        eventsBySeverity: eventsBySeverityMap,
        eventsByOutcome: eventsByOutcomeMap,
        topIPs,
        topUsers,
        hourlyDistribution,
        geoDistribution,
      };
    } catch (error) {
      throw new RepositoryError(
        "Failed to get security analytics",
        "ANALYTICS_ERROR",
        error as Error
      );
    }
  }

  /**
   * Clean up old audit logs based on retention policy
   */
  async cleanupOldLogs(retentionDays: number): Promise<number> {
    try {
      const cutoffDate = new Date(
        Date.now() - retentionDays * 24 * 60 * 60 * 1000
      );

      const result = await prisma.securityAuditLog.deleteMany({
        where: {
          timestamp: { lt: cutoffDate },
        },
      });

      return result.count;
    } catch (error) {
      throw new RepositoryError(
        "Failed to cleanup old audit logs",
        "CLEANUP_ERROR",
        error as Error
      );
    }
  }

  /**
   * Get suspicious activity summary for an IP
   */
  async getIPActivitySummary(
    ipAddress: string,
    hoursBack = 24
  ): Promise<{
    failedLogins: number;
    rateLimitViolations: number;
    uniqueUsersTargeted: number;
    totalEvents: number;
    timeSpan: { first: Date | null; last: Date | null };
  }> {
    try {
      const startTime = new Date(Date.now() - hoursBack * 60 * 60 * 1000);

      const events = await this.findByIPAddress(ipAddress, startTime);

      const failedLogins = events.filter(
        (e) =>
          e.eventType === SecurityEventType.AUTHENTICATION &&
          e.outcome === AuditOutcome.FAILURE
      ).length;

      const rateLimitViolations = events.filter(
        (e) => e.outcome === AuditOutcome.RATE_LIMITED
      ).length;

      const uniqueUsersTargeted = new Set(
        events.map((e) => e.userId).filter(Boolean)
      ).size;

      const timeSpan = {
        first: events.length > 0 ? events[events.length - 1].timestamp : null,
        last: events.length > 0 ? events[0].timestamp : null,
      };

      return {
        failedLogins,
        rateLimitViolations,
        uniqueUsersTargeted,
        totalEvents: events.length,
        timeSpan,
      };
    } catch (error) {
      throw new RepositoryError(
        `Failed to get IP activity summary for ${ipAddress}`,
        "ACTIVITY_SUMMARY_ERROR",
        error as Error
      );
    }
  }
}
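A brief sketch of how a dashboard handler might drive the analytics and retention entry points above (the 7-day window and 90-day retention are illustrative values):

import { getSecurityAuditLogRepository } from "@/lib/repositories/RepositoryFactory";

async function securityOverview(companyId: string) {
  const repo = getSecurityAuditLogRepository();
  const end = new Date();
  const start = new Date(end.getTime() - 7 * 24 * 60 * 60 * 1000);

  const analytics = await repo.getSecurityAnalytics(start, end, companyId);
  const purged = await repo.cleanupOldLogs(90); // illustrative retention policy

  return { analytics, purged };
}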

lib/repositories/SessionRepository.ts (new file, 335 lines)
@@ -0,0 +1,335 @@
import type { Prisma, Session } from "@prisma/client";
import { prisma } from "../prisma";
import {
  type BaseRepository,
  type CountOptions,
  type CreateInput,
  type FindManyOptions,
  NotFoundError,
  RepositoryError,
  type UpdateInput,
} from "./BaseRepository";

/**
 * Session with included relations
 */
export type SessionWithRelations = Session & {
  messages?: Array<{
    id: string;
    sessionId: string;
    timestamp: Date | null;
    role: string;
    content: string;
    order: number;
    createdAt: Date;
  }>;
  company?: {
    id: string;
    name: string;
  };
  sessionImport?: {
    id: string;
    status: string;
  };
};

/**
 * Session repository implementing database operations
 */
export class SessionRepository implements BaseRepository<Session> {
  /**
   * Find session by ID with optional relations
   */
  async findById(
    id: string,
    include?: { messages?: boolean; company?: boolean; sessionImport?: boolean }
  ): Promise<SessionWithRelations | null> {
    try {
      return await prisma.session.findUnique({
        where: { id },
        include: {
          messages: include?.messages
            ? { orderBy: { order: "asc" } }
            : undefined,
          company: include?.company
            ? { select: { id: true, name: true } }
            : undefined,
          sessionImport: include?.sessionImport
            ? { select: { id: true, status: true } }
            : undefined,
        },
      });
    } catch (error) {
      throw new RepositoryError(
        `Failed to find session ${id}`,
        "FIND_ERROR",
        error as Error
      );
    }
  }

  /**
   * Find sessions by company ID
   */
  async findByCompanyId(
    companyId: string,
    options?: Omit<FindManyOptions<Session>, "where">
  ): Promise<Session[]> {
    try {
      return await prisma.session.findMany({
        where: { companyId },
        orderBy: options?.orderBy as Prisma.SessionOrderByWithRelationInput,
        skip: options?.skip,
        take: options?.take,
        include: options?.include as Prisma.SessionInclude,
      });
    } catch (error) {
      throw new RepositoryError(
        `Failed to find sessions for company ${companyId}`,
        "FIND_ERROR",
        error as Error
      );
    }
  }

  /**
   * Find sessions by date range
   */
  async findByDateRange(
    startDate: Date,
    endDate: Date,
    companyId?: string
  ): Promise<Session[]> {
    try {
      return await prisma.session.findMany({
        where: {
          startTime: {
            gte: startDate,
            lte: endDate,
          },
          ...(companyId && { companyId }),
        },
        orderBy: { startTime: "desc" },
      });
    } catch (error) {
      throw new RepositoryError(
        "Failed to find sessions by date range",
        "FIND_ERROR",
        error as Error
      );
    }
  }

  /**
   * Find many sessions with filters
   */
  async findMany(options?: FindManyOptions<Session>): Promise<Session[]> {
    try {
      return await prisma.session.findMany({
        where: options?.where as Prisma.SessionWhereInput,
        orderBy: options?.orderBy as Prisma.SessionOrderByWithRelationInput,
        skip: options?.skip,
        take: options?.take,
        include: options?.include as Prisma.SessionInclude,
      });
    } catch (error) {
      throw new RepositoryError(
        "Failed to find sessions",
        "FIND_ERROR",
        error as Error
      );
    }
  }

  /**
   * Create a new session
   */
  async create(data: CreateInput<Session>): Promise<Session> {
    try {
      return await prisma.session.create({
        data: data as Prisma.SessionCreateInput,
      });
    } catch (error) {
      throw new RepositoryError(
        "Failed to create session",
        "CREATE_ERROR",
        error as Error
      );
    }
  }

  /**
   * Update session by ID
   */
  async update(
    id: string,
    data: UpdateInput<Session>
  ): Promise<Session | null> {
    try {
      const session = await this.findById(id);
      if (!session) {
        throw new NotFoundError("Session", id);
      }

      return await prisma.session.update({
        where: { id },
        data: data as Prisma.SessionUpdateInput,
      });
    } catch (error) {
      if (error instanceof NotFoundError) throw error;
      throw new RepositoryError(
        `Failed to update session ${id}`,
        "UPDATE_ERROR",
        error as Error
      );
    }
  }

  /**
   * Delete session by ID
   */
  async delete(id: string): Promise<boolean> {
    try {
      const session = await this.findById(id);
      if (!session) {
        throw new NotFoundError("Session", id);
      }

      await prisma.session.delete({ where: { id } });
      return true;
    } catch (error) {
      if (error instanceof NotFoundError) throw error;
      throw new RepositoryError(
        `Failed to delete session ${id}`,
        "DELETE_ERROR",
        error as Error
      );
    }
  }

  /**
   * Count sessions with optional filters
   */
  async count(options?: CountOptions<Session>): Promise<number> {
    try {
      return await prisma.session.count({
        where: options?.where as Prisma.SessionWhereInput,
      });
    } catch (error) {
      throw new RepositoryError(
        "Failed to count sessions",
        "COUNT_ERROR",
        error as Error
      );
    }
  }

  /**
   * Get session metrics for a company
   */
  async getSessionMetrics(
    companyId: string,
    startDate: Date,
    endDate: Date
  ): Promise<{
    totalSessions: number;
    avgSessionLength: number | null;
    sentimentDistribution: Record<string, number>;
    categoryDistribution: Record<string, number>;
  }> {
    try {
      const sessions = await this.findByDateRange(
        startDate,
        endDate,
        companyId
      );

      const totalSessions = sessions.length;
      const avgSessionLength =
        sessions.length > 0
          ? sessions
              .filter((s) => s.endTime)
              .reduce((sum, s) => {
                const duration = s.endTime
                  ? (s.endTime.getTime() - s.startTime.getTime()) / 1000
                  : 0;
                return sum + duration;
              }, 0) / sessions.filter((s) => s.endTime).length
          : null;

      const sentimentDistribution = sessions.reduce(
        (acc, session) => {
          const sentiment = session.sentiment || "unknown";
          acc[sentiment] = (acc[sentiment] || 0) + 1;
          return acc;
        },
        {} as Record<string, number>
      );

      const categoryDistribution = sessions.reduce(
        (acc, session) => {
          const category = session.category || "uncategorized";
          acc[category] = (acc[category] || 0) + 1;
          return acc;
        },
        {} as Record<string, number>
      );

      return {
        totalSessions,
        avgSessionLength,
        sentimentDistribution,
        categoryDistribution,
      };
    } catch (error) {
      throw new RepositoryError(
        "Failed to get session metrics",
        "METRICS_ERROR",
        error as Error
      );
    }
  }

  /**
   * Find sessions needing AI processing
   */
  async findPendingAIProcessing(limit = 100): Promise<Session[]> {
    try {
      return await prisma.session.findMany({
        where: {
          OR: [{ sentiment: null }, { category: null }, { summary: null }],
        },
        take: limit,
        orderBy: { createdAt: "asc" },
      });
    } catch (error) {
      throw new RepositoryError(
        "Failed to find sessions pending AI processing",
        "FIND_ERROR",
        error as Error
      );
    }
  }

  /**
   * Bulk update sessions
   */
  async bulkUpdate(
    where: Prisma.SessionWhereInput,
    data: Prisma.SessionUpdateInput
  ): Promise<number> {
    try {
      const result = await prisma.session.updateMany({
        where,
        data,
      });
      return result.count;
    } catch (error) {
      throw new RepositoryError(
        "Failed to bulk update sessions",
        "BULK_UPDATE_ERROR",
        error as Error
      );
    }
  }
}
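findPendingAIProcessing() plus update() gives a natural polling worker; a minimal sketch (the batch size and the enrichment payload are placeholders):

import { getSessionRepository } from "@/lib/repositories/RepositoryFactory";

async function drainAIBacklog() {
  const repo = getSessionRepository();
  const pending = await repo.findPendingAIProcessing(25); // placeholder batch size

  for (const session of pending) {
    // Placeholder enrichment; the real pipeline would supply these values.
    await repo.update(session.id, { sentiment: "neutral", category: "general" });
  }
}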

lib/repositories/UserRepository.ts (new file, 396 lines)
@@ -0,0 +1,396 @@
import type { Prisma, User } from "@prisma/client";
import { prisma } from "../prisma";
import {
  type BaseRepository,
  type CountOptions,
  type CreateInput,
  type FindManyOptions,
  NotFoundError,
  RepositoryError,
  type UpdateInput,
} from "./BaseRepository";

/**
 * User with included relations
 */
export type UserWithRelations = User & {
  company?: {
    id: string;
    name: string;
  };
  securityAuditLogs?: Array<{
    id: string;
    eventType: string;
    timestamp: Date;
    outcome: string;
  }>;
};

/**
 * User repository implementing database operations
 */
export class UserRepository implements BaseRepository<User> {
  /**
   * Find user by ID with optional relations
   */
  async findById(
    id: string,
    include?: { company?: boolean; securityAuditLogs?: boolean }
  ): Promise<UserWithRelations | null> {
    try {
      return await prisma.user.findUnique({
        where: { id },
        include: {
          company: include?.company
            ? { select: { id: true, name: true } }
            : undefined,
          securityAuditLogs: include?.securityAuditLogs
            ? {
                select: {
                  id: true,
                  eventType: true,
                  timestamp: true,
                  outcome: true,
                },
                take: 100,
                orderBy: { timestamp: "desc" },
              }
            : undefined,
        },
      });
    } catch (error) {
      throw new RepositoryError(
        `Failed to find user ${id}`,
        "FIND_ERROR",
        error as Error
      );
    }
  }

  /**
   * Find user by email
   */
  async findByEmail(email: string): Promise<User | null> {
    try {
      return await prisma.user.findUnique({
        where: { email },
      });
    } catch (error) {
      throw new RepositoryError(
        `Failed to find user by email ${email}`,
        "FIND_ERROR",
        error as Error
      );
    }
  }

  /**
   * Find users by company ID
   */
  async findByCompanyId(companyId: string): Promise<User[]> {
    try {
      return await prisma.user.findMany({
        where: { companyId },
        orderBy: { createdAt: "desc" },
      });
    } catch (error) {
      throw new RepositoryError(
        `Failed to find users by company ${companyId}`,
        "FIND_ERROR",
        error as Error
      );
    }
  }

  /**
   * Find users by role
   */
  async findByRole(role: string, companyId?: string): Promise<User[]> {
    try {
      return await prisma.user.findMany({
        where: {
          role,
          ...(companyId && { companyId }),
        },
        orderBy: { createdAt: "desc" },
      });
    } catch (error) {
      throw new RepositoryError(
        `Failed to find users by role ${role}`,
        "FIND_ERROR",
        error as Error
      );
    }
  }

  /**
   * Find many users with filters
   */
  async findMany(options?: FindManyOptions<User>): Promise<User[]> {
    try {
      return await prisma.user.findMany({
        where: options?.where as Prisma.UserWhereInput,
        orderBy: options?.orderBy as Prisma.UserOrderByWithRelationInput,
        skip: options?.skip,
        take: options?.take,
        include: options?.include as Prisma.UserInclude,
      });
    } catch (error) {
      throw new RepositoryError(
        "Failed to find users",
        "FIND_ERROR",
        error as Error
      );
    }
  }

  /**
   * Create a new user
   */
  async create(data: CreateInput<User>): Promise<User> {
    try {
      return await prisma.user.create({
        data: data as Prisma.UserCreateInput,
      });
    } catch (error) {
      throw new RepositoryError(
        "Failed to create user",
        "CREATE_ERROR",
        error as Error
      );
    }
  }

  /**
   * Update user by ID
   */
  async update(id: string, data: UpdateInput<User>): Promise<User | null> {
    try {
      const user = await this.findById(id);
      if (!user) {
        throw new NotFoundError("User", id);
      }

      return await prisma.user.update({
        where: { id },
        data: data as Prisma.UserUpdateInput,
      });
    } catch (error) {
      if (error instanceof NotFoundError) throw error;
      throw new RepositoryError(
        `Failed to update user ${id}`,
        "UPDATE_ERROR",
        error as Error
      );
    }
  }

  /**
   * Delete user by ID
   */
  async delete(id: string): Promise<boolean> {
    try {
      const user = await this.findById(id);
      if (!user) {
        throw new NotFoundError("User", id);
      }

      await prisma.user.delete({ where: { id } });
      return true;
    } catch (error) {
      if (error instanceof NotFoundError) throw error;
      throw new RepositoryError(
        `Failed to delete user ${id}`,
        "DELETE_ERROR",
        error as Error
      );
    }
  }

  /**
   * Count users with optional filters
   */
  async count(options?: CountOptions<User>): Promise<number> {
    try {
      return await prisma.user.count({
        where: options?.where as Prisma.UserWhereInput,
      });
    } catch (error) {
      throw new RepositoryError(
        "Failed to count users",
        "COUNT_ERROR",
        error as Error
      );
    }
  }

  /**
   * Update user last login timestamp
   */
  async updateLastLogin(id: string): Promise<User | null> {
    try {
      return await this.update(id, {
        lastLoginAt: new Date(),
      });
    } catch (error) {
      throw new RepositoryError(
        `Failed to update last login for user ${id}`,
        "UPDATE_LOGIN_ERROR",
        error as Error
      );
    }
  }

  /**
   * Find users with recent security events
   */
  async findUsersWithRecentSecurityEvents(
    hoursBack = 24,
    minEvents = 5
  ): Promise<Array<{ user: User; eventCount: number }>> {
    try {
      const startTime = new Date(Date.now() - hoursBack * 60 * 60 * 1000);

      const usersWithEvents = await prisma.user.findMany({
        where: {
          securityAuditLogs: {
            some: {
              timestamp: { gte: startTime },
            },
          },
        },
        include: {
          securityAuditLogs: {
            where: {
              timestamp: { gte: startTime },
            },
            select: { id: true },
          },
        },
      });

      return usersWithEvents
        .map((user) => ({
          user: {
            ...user,
            securityAuditLogs: undefined, // Remove from result
          } as User,
          eventCount: user.securityAuditLogs?.length || 0,
        }))
        .filter((item) => item.eventCount >= minEvents)
        .sort((a, b) => b.eventCount - a.eventCount);
    } catch (error) {
      throw new RepositoryError(
        "Failed to find users with recent security events",
        "SECURITY_EVENTS_ERROR",
        error as Error
      );
    }
  }

  /**
   * Get user activity summary
   */
  async getUserActivitySummary(
    userId: string,
    hoursBack = 24
  ): Promise<{
    totalEvents: number;
    failedLogins: number;
    successfulLogins: number;
    rateLimitViolations: number;
    lastActivity: Date | null;
    countriesAccessed: string[];
  }> {
    try {
      const startTime = new Date(Date.now() - hoursBack * 60 * 60 * 1000);

      const events = await prisma.securityAuditLog.findMany({
        where: {
          userId,
          timestamp: { gte: startTime },
        },
        orderBy: { timestamp: "desc" },
      });

      const totalEvents = events.length;
      const failedLogins = events.filter(
        (e) => e.eventType === "AUTHENTICATION" && e.outcome === "FAILURE"
      ).length;
      const successfulLogins = events.filter(
        (e) => e.eventType === "AUTHENTICATION" && e.outcome === "SUCCESS"
      ).length;
      const rateLimitViolations = events.filter(
        (e) => e.outcome === "RATE_LIMITED"
      ).length;
      const lastActivity = events.length > 0 ? events[0].timestamp : null;
      const countriesAccessed = [
        ...new Set(events.map((e) => e.country).filter(Boolean)),
      ];

      return {
        totalEvents,
        failedLogins,
        successfulLogins,
        rateLimitViolations,
        lastActivity,
        countriesAccessed,
      };
    } catch (error) {
      throw new RepositoryError(
        `Failed to get activity summary for user ${userId}`,
        "ACTIVITY_SUMMARY_ERROR",
        error as Error
      );
    }
  }

  /**
   * Find inactive users (no login for specified days)
   */
  async findInactiveUsers(daysInactive = 30): Promise<User[]> {
    try {
      const cutoffDate = new Date(
        Date.now() - daysInactive * 24 * 60 * 60 * 1000
      );

      return await prisma.user.findMany({
        where: {
          OR: [{ lastLoginAt: { lt: cutoffDate } }, { lastLoginAt: null }],
        },
        orderBy: { lastLoginAt: "asc" },
      });
    } catch (error) {
      throw new RepositoryError(
        "Failed to find inactive users",
        "FIND_INACTIVE_ERROR",
        error as Error
      );
    }
  }

  /**
   * Search users by name or email
   */
  async searchUsers(query: string, companyId?: string): Promise<User[]> {
    try {
      return await prisma.user.findMany({
        where: {
          OR: [
            { name: { contains: query, mode: "insensitive" } },
            { email: { contains: query, mode: "insensitive" } },
          ],
          ...(companyId && { companyId }),
        },
        orderBy: { name: "asc" },
        take: 50, // Limit results
      });
    } catch (error) {
      throw new RepositoryError(
        `Failed to search users with query "${query}"`,
        "SEARCH_ERROR",
        error as Error
      );
    }
  }
}
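A short sketch chaining the two security helpers above into an admin review (the thresholds are illustrative):

import { getUserRepository } from "@/lib/repositories/RepositoryFactory";

async function reviewNoisyUsers() {
  const repo = getUserRepository();
  // Users with 10+ audit events in the last 24h, sorted by volume.
  const noisy = await repo.findUsersWithRecentSecurityEvents(24, 10);

  for (const { user, eventCount } of noisy) {
    const summary = await repo.getUserActivitySummary(user.id, 24);
    console.log(user.email, eventCount, summary.failedLogins);
  }
}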

@@ -23,6 +23,7 @@ export interface AuditLogEntry {
  context?: AuditLogContext;
}

/* eslint-disable @typescript-eslint/no-unused-vars, no-unused-vars */
export enum SecurityEventType {
  AUTHENTICATION = "AUTHENTICATION",
  AUTHORIZATION = "AUTHORIZATION",
@@ -37,7 +38,9 @@ export enum SecurityEventType {
  SYSTEM_CONFIG = "SYSTEM_CONFIG",
  API_SECURITY = "API_SECURITY",
}
/* eslint-enable @typescript-eslint/no-unused-vars, no-unused-vars */

/* eslint-disable @typescript-eslint/no-unused-vars, no-unused-vars */
export enum AuditOutcome {
  SUCCESS = "SUCCESS",
  FAILURE = "FAILURE",
@@ -45,7 +48,9 @@ export enum AuditOutcome {
  RATE_LIMITED = "RATE_LIMITED",
  SUSPICIOUS = "SUSPICIOUS",
}
/* eslint-enable @typescript-eslint/no-unused-vars, no-unused-vars */

/* eslint-disable @typescript-eslint/no-unused-vars, no-unused-vars */
export enum AuditSeverity {
  INFO = "INFO",
  LOW = "LOW",
@@ -53,6 +58,7 @@ export enum AuditSeverity {
  HIGH = "HIGH",
  CRITICAL = "CRITICAL",
}
/* eslint-enable @typescript-eslint/no-unused-vars, no-unused-vars */

class SecurityAuditLogger {
  private isEnabled: boolean;
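The eslint pragmas keep unused-vars from flagging enum members that are only consumed elsewhere. For reference, the enums feed the logger like this; a minimal call sketch based on the log() shape visible further down in this diff (the action string is illustrative):

await securityAuditLogger.log({
  eventType: SecurityEventType.AUTHENTICATION,
  action: "login_failed", // illustrative action name
  outcome: AuditOutcome.FAILURE,
  severity: AuditSeverity.LOW,
  context: { ipAddress: "203.0.113.7" },
});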

@@ -1,11 +1,15 @@
import { prisma } from "./prisma";
import { SECURITY_MONITORING } from "./constants";
import {
  type AuditLogContext,
  AuditOutcome,
  type AuditOutcome,
  AuditSeverity,
  SecurityEventType,
  securityAuditLogger,
} from "./securityAuditLogger";
import { AlertManagementService } from "./services/AlertManagementService";
import { SecurityEventProcessor } from "./services/SecurityEventProcessor";
import { SecurityMetricsService } from "./services/SecurityMetricsService";
import { ThreatDetectionService } from "./services/ThreatDetectionService";

// Utility type for deep partial objects
type DeepPartial<T> = {
@@ -27,20 +31,23 @@ export interface SecurityAlert {
  acknowledgedAt?: Date;
}

/* eslint-disable @typescript-eslint/no-unused-vars, no-unused-vars */
export enum AlertSeverity {
  LOW = "LOW",
  MEDIUM = "MEDIUM",
  HIGH = "HIGH",
  CRITICAL = "CRITICAL",
}
/* eslint-enable @typescript-eslint/no-unused-vars, no-unused-vars */

/* eslint-disable @typescript-eslint/no-unused-vars, no-unused-vars */
export enum AlertType {
  AUTHENTICATION_ANOMALY = "AUTHENTICATION_ANOMALY",
  RATE_LIMIT_BREACH = "RATE_LIMIT_BREACH",
  MULTIPLE_FAILED_LOGINS = "MULTIPLE_FAILED_LOGINS",
  SUSPICIOUS_IP_ACTIVITY = "SUSPICIOUS_IP_ACTIVITY",
  PRIVILEGE_ESCALATION = "PRIVILEGE_ESCALATION",
  DATA_BREACH_ATTEMPT = "DATA_BREACH_ATTEMPT",
  CSRF_ATTACK = "CSRF_ATTACK",
  CSP_VIOLATION_SPIKE = "CSP_VIOLATION_SPIKE",
  ACCOUNT_ENUMERATION = "ACCOUNT_ENUMERATION",
@@ -51,6 +58,7 @@ export enum AlertType {
  SUSPICIOUS_USER_AGENT = "SUSPICIOUS_USER_AGENT",
  SESSION_HIJACKING = "SESSION_HIJACKING",
}
/* eslint-enable @typescript-eslint/no-unused-vars, no-unused-vars */

export interface SecurityMetrics {
  totalEvents: number;
@@ -67,12 +75,14 @@ export interface SecurityMetrics {
  userRiskScores: Array<{ userId: string; email: string; riskScore: number }>;
}

/* eslint-disable @typescript-eslint/no-unused-vars, no-unused-vars */
export enum ThreatLevel {
  LOW = "LOW",
  MODERATE = "MODERATE",
  HIGH = "HIGH",
  CRITICAL = "CRITICAL",
}
/* eslint-enable @typescript-eslint/no-unused-vars, no-unused-vars */

export interface MonitoringConfig {
  thresholds: {
@@ -96,6 +106,7 @@ export interface MonitoringConfig {
  };
}

/* eslint-disable @typescript-eslint/no-unused-vars, no-unused-vars */
export enum AlertChannel {
  EMAIL = "EMAIL",
  WEBHOOK = "WEBHOOK",
@@ -103,6 +114,7 @@ export enum AlertChannel {
  DISCORD = "DISCORD",
  PAGERDUTY = "PAGERDUTY",
}
/* eslint-enable @typescript-eslint/no-unused-vars, no-unused-vars */

export interface AnomalyDetectionResult {
  isAnomaly: boolean;
@@ -112,19 +124,26 @@ export interface AnomalyDetectionResult {
  recommendedActions: string[];
}

/**
 * Refactored SecurityMonitoringService that coordinates focused services
 * Responsibilities: Configuration, coordination, and background processing
 */
class SecurityMonitoringService {
  private alerts: SecurityAlert[] = [];
  private config: MonitoringConfig;
  private eventBuffer: Array<{
    timestamp: Date;
    eventType: SecurityEventType;
    context: AuditLogContext;
    outcome: AuditOutcome;
    severity: AuditSeverity;
  }> = [];
  private eventProcessor: SecurityEventProcessor;
  private threatDetection: ThreatDetectionService;
  private alertManagement: AlertManagementService;
  private metricsService: SecurityMetricsService;

  constructor() {
    this.config = this.getDefaultConfig();

    // Initialize focused services
    this.eventProcessor = new SecurityEventProcessor();
    this.threatDetection = new ThreatDetectionService(this.config);
    this.alertManagement = new AlertManagementService(this.config);
    this.metricsService = new SecurityMetricsService();

    this.startBackgroundProcessing();
  }

@@ -139,30 +158,30 @@ class SecurityMonitoringService {
    metadata?: Record<string, unknown>
  ): Promise<void> {
    // Add event to buffer for analysis
    this.eventBuffer.push({
      timestamp: new Date(),
      eventType,
      context,
      outcome,
      severity,
    });
    this.eventProcessor.addEvent(eventType, outcome, context, severity);

    // Immediate threat detection
    const threats = await this.detectImediateThreats(
    const threatResult = await this.threatDetection.detectImmediateThreats(
      eventType,
      outcome,
      context,
      metadata
    );

    for (const threat of threats) {
      await this.createAlert(threat);
    for (const threat of threatResult.threats) {
      await this.alertManagement.createAlert(threat);
    }

    // Anomaly detection
    const anomaly = await this.detectAnomalies(eventType, context);
    const recentEvents = this.eventProcessor.getRecentEvents();
    const anomaly = await this.threatDetection.detectAnomalies(
      eventType,
      context,
      recentEvents
    );

    if (anomaly.isAnomaly && anomaly.confidence > 0.7) {
      await this.createAlert({
      await this.alertManagement.createAlert({
        severity: this.mapConfidenceToSeverity(anomaly.confidence),
        type: AlertType.AUTHENTICATION_ANOMALY,
        title: `Anomaly Detected: ${anomaly.type}`,
@@ -174,7 +193,7 @@ class SecurityMonitoringService {
    }

    // Clean old events to prevent memory issues
    this.cleanupEventBuffer();
    this.eventProcessor.cleanup();
  }
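mapConfidenceToSeverity() is called above but not shown in this hunk; one plausible shape, with cut-offs that are pure assumptions:

private mapConfidenceToSeverity(confidence: number): AlertSeverity {
  // Assumed thresholds; the real implementation may differ.
  if (confidence >= 0.95) return AlertSeverity.CRITICAL;
  if (confidence >= 0.85) return AlertSeverity.HIGH;
  if (confidence >= 0.7) return AlertSeverity.MEDIUM;
  return AlertSeverity.LOW;
}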

  /**
@@ -184,115 +203,19 @@ class SecurityMonitoringService {
    timeRange: { start: Date; end: Date },
    companyId?: string
  ): Promise<SecurityMetrics> {
    const whereClause = {
      timestamp: {
        gte: timeRange.start,
        lte: timeRange.end,
      },
      ...(companyId && { companyId }),
    };

    // Get audit log data
    const events = await prisma.securityAuditLog.findMany({
      where: whereClause,
      include: {
        user: { select: { email: true } },
        company: { select: { name: true } },
      },
    });

    // Calculate metrics
    const totalEvents = events.length;
    const criticalEvents = events.filter(
      (e) => e.severity === AuditSeverity.CRITICAL
    ).length;

    const activeAlerts = this.alerts.filter((a) => !a.acknowledged).length;
    const resolvedAlerts = this.alerts.filter((a) => a.acknowledged).length;

    // Event distribution by type
    const eventsByType = events.reduce(
      (acc, event) => {
        acc[event.eventType] = (acc[event.eventType] || 0) + 1;
        return acc;
      },
      {} as Record<SecurityEventType, number>
    const alerts = this.alertManagement.getAlertsInTimeRange(timeRange);
    return this.metricsService.calculateSecurityMetrics(
      timeRange,
      companyId,
      alerts
    );

    // Alert distribution by type
    const alertsByType = this.alerts.reduce(
      (acc, alert) => {
        acc[alert.type] = (acc[alert.type] || 0) + 1;
        return acc;
      },
      {} as Record<AlertType, number>
    );

    // Top threats
    const topThreats = Object.entries(alertsByType)
      .map(([type, count]) => ({ type: type as AlertType, count }))
      .sort((a, b) => b.count - a.count)
      .slice(0, 5);

    // Geographic distribution
    const geoDistribution = events.reduce(
      (acc, event) => {
        if (event.country) {
          acc[event.country] = (acc[event.country] || 0) + 1;
        }
        return acc;
      },
      {} as Record<string, number>
    );

    // Time distribution (by hour)
    const timeDistribution = Array.from({ length: 24 }, (_, hour) => ({
      hour,
      count: events.filter((e) => e.timestamp.getHours() === hour).length,
    }));

    // User risk scores
    const userRiskScores = await this.calculateUserRiskScores(events);

    // Calculate overall security score
    const securityScore = this.calculateSecurityScore({
      totalEvents,
      criticalEvents,
      activeAlerts,
      topThreats,
    });

    // Determine threat level
    const threatLevel = this.determineThreatLevel(
      securityScore,
      activeAlerts,
      criticalEvents
    );

    return {
      totalEvents,
      criticalEvents,
      activeAlerts,
      resolvedAlerts,
      securityScore,
      threatLevel,
      eventsByType,
      alertsByType,
      topThreats,
      geoDistribution,
      timeDistribution,
      userRiskScores,
    };
  }
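The method body now delegates entirely to AlertManagementService and SecurityMetricsService, so callers keep the same surface. A sketch of a 24-hour window call (the instance name is assumed, and the method name is inferred from its SecurityMetrics return type):

const timeRange = {
  start: new Date(Date.now() - 24 * 60 * 60 * 1000),
  end: new Date(),
};

// "securityMonitoring" is an assumed export; the (timeRange, companyId?) signature is from the hunk.
const metrics: SecurityMetrics = await securityMonitoring.getSecurityMetrics(
  timeRange,
  "company-1" // illustrative company id
);
console.log(metrics.threatLevel, metrics.activeAlerts);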

  /**
   * Get active security alerts
   */
  getActiveAlerts(severity?: AlertSeverity): SecurityAlert[] {
    return this.alerts.filter(
      (alert) =>
        !alert.acknowledged && (!severity || alert.severity === severity)
    );
    return this.alertManagement.getActiveAlerts(severity);
  }

  /**
@@ -302,26 +225,7 @@ class SecurityMonitoringService {
    alertId: string,
    acknowledgedBy: string
  ): Promise<boolean> {
    const alert = this.alerts.find((a) => a.id === alertId);
    if (!alert) return false;

    alert.acknowledged = true;
    alert.acknowledgedBy = acknowledgedBy;
    alert.acknowledgedAt = new Date();

    // Log the acknowledgment
    await securityAuditLogger.log({
      eventType: SecurityEventType.SYSTEM_CONFIG,
      action: "alert_acknowledged",
      outcome: AuditOutcome.SUCCESS,
      severity: AuditSeverity.INFO,
      context: {
        userId: acknowledgedBy,
        metadata: { alertId, alertType: alert.type },
      },
    });

    return true;
    return this.alertManagement.acknowledgeAlert(alertId, acknowledgedBy);
  }

  /**
@@ -331,45 +235,7 @@ class SecurityMonitoringService {
    format: "json" | "csv",
    timeRange: { start: Date; end: Date }
  ): string {
    const filteredAlerts = this.alerts.filter(
      (a) => a.timestamp >= timeRange.start && a.timestamp <= timeRange.end
    );

    if (format === "csv") {
      const headers = [
        "timestamp",
        "severity",
        "type",
        "title",
        "description",
        "eventType",
        "userId",
        "companyId",
        "ipAddress",
        "userAgent",
        "acknowledged",
      ].join(",");

      const rows = filteredAlerts.map((alert) =>
        [
          alert.timestamp.toISOString(),
          alert.severity,
          alert.type,
          `"${alert.title}"`,
          `"${alert.description}"`,
          alert.eventType,
          alert.context.userId || "",
          alert.context.companyId || "",
          alert.context.ipAddress || "",
          alert.context.userAgent || "",
          alert.acknowledged.toString(),
        ].join(",")
      );

      return [headers, ...rows].join("\n");
    }

    return JSON.stringify(filteredAlerts, null, 2);
    return this.alertManagement.exportAlertsData(format, timeRange);
  }

  /**
@@ -419,431 +285,7 @@ class SecurityMonitoringService {
    recommendations: string[];
    isBlacklisted: boolean;
  }> {
    const oneDayAgo = new Date(Date.now() - 24 * 60 * 60 * 1000);

    const events = await prisma.securityAuditLog.findMany({
      where: {
        ipAddress,
        timestamp: { gte: oneDayAgo },
      },
    });

    const riskFactors: string[] = [];
    const recommendations: string[] = [];

    // Failed login attempts
    const failedLogins = events.filter(
      (e) =>
        e.eventType === SecurityEventType.AUTHENTICATION &&
        e.outcome === AuditOutcome.FAILURE
    ).length;

    if (failedLogins > 10) {
      riskFactors.push(`${failedLogins} failed login attempts in 24h`);
      recommendations.push("Consider temporary IP blocking");
    }

    // Rate limit violations
    const rateLimitViolations = events.filter(
      (e) => e.outcome === AuditOutcome.RATE_LIMITED
    ).length;

    if (rateLimitViolations > 5) {
      riskFactors.push(`${rateLimitViolations} rate limit violations`);
      recommendations.push("Implement stricter rate limiting");
    }

    // Multiple user attempts
    const uniqueUsers = new Set(events.map((e) => e.userId).filter(Boolean))
      .size;
    if (uniqueUsers > 5) {
      riskFactors.push(`Access attempts to ${uniqueUsers} different accounts`);
      recommendations.push("Investigate for account enumeration");
    }

    // Determine threat level
    let threatLevel = ThreatLevel.LOW;
    if (riskFactors.length >= 3) threatLevel = ThreatLevel.CRITICAL;
    else if (riskFactors.length >= 2) threatLevel = ThreatLevel.HIGH;
    else if (riskFactors.length >= 1) threatLevel = ThreatLevel.MODERATE;

    // Ensure we always provide at least basic analysis
    if (riskFactors.length === 0) {
      riskFactors.push(`${events.length} security events in 24h`);
    }

    if (recommendations.length === 0) {
      recommendations.push("Continue monitoring for suspicious activity");
    }

    // Simple blacklist check based on threat level and risk factors
    const isBlacklisted =
      threatLevel === ThreatLevel.CRITICAL && riskFactors.length >= 3;

    return { threatLevel, riskFactors, recommendations, isBlacklisted };
  }
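To make the removed thresholds concrete, a worked example as comments:

// Worked example of the removed scoring (illustrative counts):
//   12 failed logins    -> risk factor #1 (threshold: > 10)
//   6 rate-limit hits   -> risk factor #2 (threshold: > 5)
//   3 accounts targeted -> below the > 5 unique-user threshold, no factor
// Two factors => ThreatLevel.HIGH; isBlacklisted stays false,
// since blacklisting required CRITICAL (three or more factors).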

  private async detectImmediateThreats(
    eventType: SecurityEventType,
    outcome: AuditOutcome,
    context: AuditLogContext,
    metadata?: Record<string, unknown>
  ): Promise<Array<Omit<SecurityAlert, "id" | "timestamp" | "acknowledged">>> {
    const threats: Array<
      Omit<SecurityAlert, "id" | "timestamp" | "acknowledged">
    > = [];
    const now = new Date();

    // Multiple failed logins detection
    if (
      eventType === SecurityEventType.AUTHENTICATION &&
      outcome === AuditOutcome.FAILURE &&
      context.ipAddress
    ) {
      const fiveMinutesAgo = new Date(now.getTime() - 5 * 60 * 1000);
      const recentFailures = await prisma.securityAuditLog.count({
        where: {
          eventType: SecurityEventType.AUTHENTICATION,
          outcome: AuditOutcome.FAILURE,
          ipAddress: context.ipAddress,
          timestamp: { gte: fiveMinutesAgo },
        },
      });

      if (recentFailures >= this.config.thresholds.failedLoginsPerMinute) {
        threats.push({
          severity: AlertSeverity.HIGH,
          type: AlertType.BRUTE_FORCE_ATTACK,
          title: "Brute Force Attack Detected",
          description: `${recentFailures} failed login attempts from IP ${context.ipAddress} in 5 minutes`,
          eventType,
          context,
          metadata: { failedAttempts: recentFailures, ...metadata },
        });
      }
    }

    // Suspicious admin activity
    if (
      eventType === SecurityEventType.PLATFORM_ADMIN ||
      (eventType === SecurityEventType.USER_MANAGEMENT && context.userId)
    ) {
      const oneHourAgo = new Date(now.getTime() - 60 * 60 * 1000);
      const adminActions = await prisma.securityAuditLog.count({
        where: {
          userId: context.userId,
          eventType: {
            in: [
              SecurityEventType.PLATFORM_ADMIN,
              SecurityEventType.USER_MANAGEMENT,
            ],
          },
          timestamp: { gte: oneHourAgo },
        },
      });

      if (adminActions >= this.config.thresholds.adminActionsPerHour) {
        threats.push({
          severity: AlertSeverity.MEDIUM,
          type: AlertType.UNUSUAL_ADMIN_ACTIVITY,
          title: "Unusual Admin Activity",
          description: `User ${context.userId} performed ${adminActions} admin actions in 1 hour`,
          eventType,
          context,
          metadata: { adminActions, ...metadata },
        });
      }
    }

    // Rate limiting violations
    if (outcome === AuditOutcome.RATE_LIMITED && context.ipAddress) {
      const oneMinuteAgo = new Date(now.getTime() - 60 * 1000);
      const rateLimitViolations = await prisma.securityAuditLog.count({
        where: {
          outcome: AuditOutcome.RATE_LIMITED,
          ipAddress: context.ipAddress,
          timestamp: { gte: oneMinuteAgo },
        },
      });

      if (
        rateLimitViolations >=
        this.config.thresholds.rateLimitViolationsPerMinute
      ) {
        threats.push({
          severity: AlertSeverity.MEDIUM,
          type: AlertType.RATE_LIMIT_BREACH,
          title: "Rate Limit Breach",
          description: `IP ${context.ipAddress} exceeded rate limits ${rateLimitViolations} times in 1 minute`,
          eventType,
          context,
          metadata: { violations: rateLimitViolations, ...metadata },
        });
      }
    }

    return threats;
  }

  private async detectAnomalies(
    eventType: SecurityEventType,
    context: AuditLogContext
  ): Promise<AnomalyDetectionResult> {
    // Simple anomaly detection based on historical patterns
    const now = new Date();
    const sevenDaysAgo = new Date(now.getTime() - 7 * 24 * 60 * 60 * 1000);

    // Get historical data for baseline
    const historicalEvents = await prisma.securityAuditLog.findMany({
      where: {
        eventType,
        timestamp: { gte: sevenDaysAgo, lt: now },
      },
    });

    // Check for unusual time patterns
    const currentHour = now.getHours();
    const hourlyEvents = (historicalEvents || []).filter(
      (e) => e.timestamp.getHours() === currentHour
    );
    const avgHourlyEvents = hourlyEvents.length / 7; // 7 days average

    const recentHourEvents = this.eventBuffer.filter(
      (e) =>
        e.eventType === eventType &&
        e.timestamp.getHours() === currentHour &&
        e.timestamp > new Date(now.getTime() - 60 * 60 * 1000)
    ).length;

    // Check for geographical anomalies
    if (context.country && context.userId) {
      const userCountries = new Set(
        (historicalEvents || [])
          .filter((e) => e.userId === context.userId && e.country)
          .map((e) => e.country)
      );

      if (userCountries.size > 0 && !userCountries.has(context.country)) {
        return {
          isAnomaly: true,
          confidence: 0.8,
          type: "geographical_anomaly",
          description: `User accessing from unusual country: ${context.country}`,
          recommendedActions: [
            "Verify user identity",
            "Check for compromised credentials",
            "Consider additional authentication",
          ],
        };
      }
    }

    // Check for time-based anomalies
    if (recentHourEvents > avgHourlyEvents * 3 && avgHourlyEvents > 0) {
      return {
        isAnomaly: true,
        confidence: 0.7,
        type: "temporal_anomaly",
        description: `Unusual activity spike: ${recentHourEvents} events vs ${avgHourlyEvents.toFixed(1)} average`,
        recommendedActions: [
          "Investigate source of increased activity",
          "Check for automated attacks",
          "Review recent system changes",
        ],
      };
    }

    return {
      isAnomaly: false,
      confidence: 0,
      type: "normal",
      description: "No anomalies detected",
      recommendedActions: [],
    };
  }

  private async createAlert(
    alertData: Omit<SecurityAlert, "id" | "timestamp" | "acknowledged">
  ): Promise<void> {
    // Check for duplicate suppression
    const suppressionWindow = new Date(
      Date.now() - this.config.alerting.suppressDuplicateMinutes * 60 * 1000
    );
    const isDuplicate = this.alerts.some(
      (a) =>
        a.type === alertData.type &&
        a.context.ipAddress === alertData.context.ipAddress &&
        a.timestamp > suppressionWindow
    );

    if (isDuplicate) return;

    const alert: SecurityAlert = {
      id: crypto.randomUUID(),
      timestamp: new Date(),
      acknowledged: false,
      ...alertData,
    };

    this.alerts.push(alert);

    // Log alert creation
    await securityAuditLogger.log({
      eventType: SecurityEventType.SYSTEM_CONFIG,
      action: "security_alert_created",
      outcome: AuditOutcome.SUCCESS,
      severity: this.mapAlertSeverityToAuditSeverity(alert.severity),
      context: alert.context,
      errorMessage: undefined,
    });

    // Send notifications if enabled
    if (this.config.alerting.enabled) {
      await this.sendAlertNotifications(alert);
    }
  }

  private async sendAlertNotifications(alert: SecurityAlert): Promise<void> {
    // In production, integrate with actual notification services
    console.error(
      `🚨 SECURITY ALERT [${alert.severity}] ${alert.type}: ${alert.title}`
    );
    console.error(`Description: ${alert.description}`);
    console.error("Context:", alert.context);

    // Example integrations you could implement:
    // - Email notifications
    // - Slack webhooks
    // - PagerDuty alerts
    // - SMS notifications
    // - Custom webhook endpoints
  }

  private async calculateUserRiskScores(
    events: Array<{
      userId?: string;
      user?: { email: string };
      eventType: SecurityEventType;
      outcome: AuditOutcome;
      severity: AuditSeverity;
      country?: string;
    }>
  ): Promise<Array<{ userId: string; email: string; riskScore: number }>> {
    const userEvents = events.filter((e) => e.userId);
    const userScores = new Map<
      string,
      { email: string; score: number; events: typeof events }
    >();

    for (const event of userEvents) {
      if (!userScores.has(event.userId)) {
        userScores.set(event.userId, {
          email: event.user?.email || "unknown",
          score: 0,
          events: [],
        });
      }
      userScores.get(event.userId)?.events.push(event);
    }

    const riskScores: Array<{
      userId: string;
      email: string;
      riskScore: number;
    }> = [];

    for (const [userId, userData] of userScores) {
      let riskScore = 0;

      // Failed authentication attempts
      const failedAuth = userData.events.filter(
        (e) =>
          e.eventType === SecurityEventType.AUTHENTICATION &&
          e.outcome === AuditOutcome.FAILURE
      ).length;
      riskScore += failedAuth * 10;

      // Rate limit violations
      const rateLimited = userData.events.filter(
        (e) => e.outcome === AuditOutcome.RATE_LIMITED
      ).length;
      riskScore += rateLimited * 15;

      // Critical events
      const criticalEvents = userData.events.filter(
        (e) => e.severity === AuditSeverity.CRITICAL
      ).length;
      riskScore += criticalEvents * 25;

      // Multiple countries
      const countries = new Set(
        userData.events.map((e) => e.country).filter(Boolean)
      );
      if (countries.size > 2) riskScore += 20;

      // Normalize score to 0-100 range
      riskScore = Math.min(100, riskScore);

      riskScores.push({
        userId,
        email: userData.email,
        riskScore,
      });
    }

    return riskScores.sort((a, b) => b.riskScore - a.riskScore).slice(0, 10);
  }

  private calculateSecurityScore(data: {
    totalEvents: number;
    criticalEvents: number;
    activeAlerts: number;
    topThreats: Array<{ type: AlertType; count: number }>;
  }): number {
    let score = 100;

    // Deduct points for critical events
    score -= Math.min(30, data.criticalEvents * 2);

    // Deduct points for active alerts
    score -= Math.min(25, data.activeAlerts * 3);

    // Deduct points for high-severity threats
    const highSeverityThreats = data.topThreats.filter((t) =>
      [
        AlertType.BRUTE_FORCE_ATTACK,
        AlertType.DATA_BREACH_ATTEMPT,
        AlertType.PRIVILEGE_ESCALATION,
      ].includes(t.type)
    );
    score -= Math.min(
      20,
      highSeverityThreats.reduce((sum, t) => sum + t.count, 0) * 5
    );

    // Deduct points for high event volume (potential attacks)
    if (data.totalEvents > 1000) {
      score -= Math.min(15, (data.totalEvents - 1000) / 100);
    }

    return Math.max(0, Math.round(score));
  }

  private determineThreatLevel(
    securityScore: number,
    activeAlerts: number,
    criticalEvents: number
  ): ThreatLevel {
    if (securityScore < 50 || activeAlerts >= 5 || criticalEvents >= 3) {
      return ThreatLevel.CRITICAL;
    }
    if (securityScore < 70 || activeAlerts >= 3 || criticalEvents >= 2) {
      return ThreatLevel.HIGH;
    }
    if (securityScore < 85 || activeAlerts >= 1 || criticalEvents >= 1) {
      return ThreatLevel.MODERATE;
    }
    return ThreatLevel.LOW;
    return this.metricsService.calculateIPThreatLevel(ipAddress);
  }

  private mapConfidenceToSeverity(confidence: number): AlertSeverity {
@@ -853,21 +295,6 @@ class SecurityMonitoringService {
    return AlertSeverity.LOW;
  }

  private mapAlertSeverityToAuditSeverity(
    severity: AlertSeverity
  ): AuditSeverity {
    switch (severity) {
      case AlertSeverity.CRITICAL:
        return AuditSeverity.CRITICAL;
      case AlertSeverity.HIGH:
        return AuditSeverity.HIGH;
      case AlertSeverity.MEDIUM:
        return AuditSeverity.MEDIUM;
      case AlertSeverity.LOW:
        return AuditSeverity.LOW;
    }
  }

  private getDefaultConfig(): MonitoringConfig {
    return {
      thresholds: {
@@ -893,47 +320,29 @@ class SecurityMonitoringService {
  }

  private startBackgroundProcessing(): void {
    // Clean up old data every hour
    setInterval(
      () => {
        this.cleanupOldData();
      },
      60 * 60 * 1000
    );
    // Clean up old data every cleanup interval
    setInterval(() => {
      this.cleanupOldData();
    }, SECURITY_MONITORING.EVENT_BUFFER_CLEANUP_INTERVAL);

    // Process event buffer every 30 seconds
    // Process event buffer for threat detection
    setInterval(() => {
      this.processEventBuffer();
    }, 30 * 1000);
  }

  private cleanupEventBuffer(): void {
    const oneHourAgo = new Date(Date.now() - 60 * 60 * 1000);
    this.eventBuffer = this.eventBuffer.filter(
      (e) => e.timestamp >= oneHourAgo
    );
    }, SECURITY_MONITORING.BACKGROUND_PROCESSING_INTERVAL);
  }

  private cleanupOldData(): void {
    const alertCutoff = new Date(
      Date.now() -
        this.config.retention.alertRetentionDays * 24 * 60 * 60 * 1000
    );
    this.alerts = this.alerts.filter((a) => a.timestamp >= alertCutoff);
    this.cleanupEventBuffer();
    this.alertManagement.cleanupOldAlerts();
    this.eventProcessor.cleanup();
  }

  private async processEventBuffer(): Promise<void> {
    // Analyze patterns in event buffer for real-time threat detection
    const now = new Date();
    const oneMinuteAgo = new Date(now.getTime() - 60 * 1000);
    const recentEvents = this.eventBuffer.filter(
      (e) => e.timestamp >= oneMinuteAgo
    );
    const recentEvents = this.eventProcessor.getRecentEvents();

    // Check for event spikes
    if (recentEvents.length > 50) {
      await this.createAlert({
      await this.alertManagement.createAlert({
        severity: AlertSeverity.MEDIUM,
        type: AlertType.SUSPICIOUS_IP_ACTIVITY,
        title: "High Event Volume Detected",
271  lib/services/AlertManagementService.ts  Normal file
@@ -0,0 +1,271 @@
import { TIME } from "../constants";
import {
  AuditOutcome,
  AuditSeverity,
  SecurityEventType,
  securityAuditLogger,
} from "../securityAuditLogger";
import {
  AlertChannel,
  AlertSeverity,
  type MonitoringConfig,
  type SecurityAlert,
} from "../securityMonitoring";

/**
 * Handles security alert management and notifications
 * Single Responsibility: Alert creation, storage, and notifications
 */
export class AlertManagementService {
  private alerts: SecurityAlert[] = [];

  constructor(private config: MonitoringConfig) {}

  /**
   * Create and store a new security alert
   */
  async createAlert(
    alertData: Omit<SecurityAlert, "id" | "timestamp" | "acknowledged">
  ): Promise<SecurityAlert | null> {
    // Check for duplicate suppression
    const suppressionWindow = new Date(
      Date.now() - this.config.alerting.suppressDuplicateMinutes * 60 * 1000
    );
    const isDuplicate = this.alerts.some(
      (a) =>
        a.type === alertData.type &&
        a.context.ipAddress === alertData.context.ipAddress &&
        a.timestamp > suppressionWindow
    );

    if (isDuplicate) return null;

    const alert: SecurityAlert = {
      id: crypto.randomUUID(),
      timestamp: new Date(),
      acknowledged: false,
      ...alertData,
    };

    this.alerts.push(alert);

    // Log alert creation
    await securityAuditLogger.log({
      eventType: SecurityEventType.SYSTEM_CONFIG,
      action: "security_alert_created",
      outcome: AuditOutcome.SUCCESS,
      severity: this.mapAlertSeverityToAuditSeverity(alert.severity),
      context: alert.context,
    });

    // Send notifications if enabled
    if (this.config.alerting.enabled) {
      await this.sendAlertNotifications(alert);
    }

    return alert;
  }

  /**
   * Get active security alerts
   */
  getActiveAlerts(severity?: AlertSeverity): SecurityAlert[] {
    return this.alerts.filter(
      (alert) =>
        !alert.acknowledged && (!severity || alert.severity === severity)
    );
  }

  /**
   * Get all alerts within time range
   */
  getAlertsInTimeRange(timeRange: { start: Date; end: Date }): SecurityAlert[] {
    return this.alerts.filter(
      (alert) =>
        alert.timestamp >= timeRange.start && alert.timestamp <= timeRange.end
    );
  }

  /**
   * Acknowledge an alert
   */
  async acknowledgeAlert(
    alertId: string,
    acknowledgedBy: string
  ): Promise<boolean> {
    const alert = this.alerts.find((a) => a.id === alertId);
    if (!alert) return false;

    alert.acknowledged = true;
    alert.acknowledgedBy = acknowledgedBy;
    alert.acknowledgedAt = new Date();

    // Log the acknowledgment
    await securityAuditLogger.log({
      eventType: SecurityEventType.SYSTEM_CONFIG,
      action: "alert_acknowledged",
      outcome: AuditOutcome.SUCCESS,
      severity: AuditSeverity.INFO,
      context: {
        userId: acknowledgedBy,
        metadata: { alertId, alertType: alert.type },
      },
    });

    return true;
  }

  /**
   * Export security alerts for analysis
   */
  exportAlertsData(
    format: "json" | "csv",
    timeRange: { start: Date; end: Date }
  ): string {
    const filteredAlerts = this.getAlertsInTimeRange(timeRange);

    if (format === "csv") {
      const headers = [
        "timestamp",
        "severity",
        "type",
        "title",
        "description",
        "eventType",
        "userId",
        "companyId",
        "ipAddress",
        "userAgent",
        "acknowledged",
      ].join(",");

      const rows = filteredAlerts.map((alert) =>
        [
          alert.timestamp.toISOString(),
          alert.severity,
          alert.type,
          `"${alert.title}"`,
          `"${alert.description}"`,
          alert.eventType,
          alert.context.userId || "",
          alert.context.companyId || "",
          alert.context.ipAddress || "",
          alert.context.userAgent || "",
          alert.acknowledged.toString(),
        ].join(",")
      );

      return [headers, ...rows].join("\n");
    }

    return JSON.stringify(filteredAlerts, null, 2);
  }

  /**
   * Clean up old alerts based on retention policy
   */
  cleanupOldAlerts(): void {
    const alertCutoff = new Date(
      Date.now() - this.config.retention.alertRetentionDays * TIME.DAY
    );
    this.alerts = this.alerts.filter((a) => a.timestamp >= alertCutoff);
  }

  /**
   * Get alert statistics
   */
  getAlertStats(): {
    total: number;
    active: number;
    acknowledged: number;
    bySeverity: Record<AlertSeverity, number>;
  } {
    const bySeverity = this.alerts.reduce(
      (acc, alert) => {
        acc[alert.severity] = (acc[alert.severity] || 0) + 1;
        return acc;
      },
      {} as Record<AlertSeverity, number>
    );

    return {
      total: this.alerts.length,
      active: this.alerts.filter((a) => !a.acknowledged).length,
      acknowledged: this.alerts.filter((a) => a.acknowledged).length,
      bySeverity,
    };
  }

  /**
   * Send alert notifications via configured channels
   */
  private async sendAlertNotifications(alert: SecurityAlert): Promise<void> {
    // Console logging for immediate visibility
    console.error(
      `🚨 SECURITY ALERT [${alert.severity}] ${alert.type}: ${alert.title}`
    );
    console.error(`Description: ${alert.description}`);
    console.error("Context:", alert.context);

    // In production, implement actual notification integrations:
    for (const channel of this.config.alerting.channels) {
      switch (channel) {
        case AlertChannel.EMAIL:
          await this.sendEmailNotification(alert);
          break;
        case AlertChannel.SLACK:
          await this.sendSlackNotification(alert);
          break;
        case AlertChannel.WEBHOOK:
          await this.sendWebhookNotification(alert);
          break;
        case AlertChannel.DISCORD:
          await this.sendDiscordNotification(alert);
          break;
        case AlertChannel.PAGERDUTY:
          await this.sendPagerDutyNotification(alert);
          break;
      }
    }
  }

  private async sendEmailNotification(alert: SecurityAlert): Promise<void> {
    // Implement email notification
    console.log(`[EMAIL] Security alert: ${alert.title}`);
  }

  private async sendSlackNotification(alert: SecurityAlert): Promise<void> {
    // Implement Slack webhook notification
    console.log(`[SLACK] Security alert: ${alert.title}`);
  }

  private async sendWebhookNotification(alert: SecurityAlert): Promise<void> {
    // Implement custom webhook notification
    console.log(`[WEBHOOK] Security alert: ${alert.title}`);
  }

  private async sendDiscordNotification(alert: SecurityAlert): Promise<void> {
    // Implement Discord webhook notification
    console.log(`[DISCORD] Security alert: ${alert.title}`);
  }

  private async sendPagerDutyNotification(alert: SecurityAlert): Promise<void> {
    // Implement PagerDuty API notification
    console.log(`[PAGERDUTY] Security alert: ${alert.title}`);
  }

  private mapAlertSeverityToAuditSeverity(
    severity: AlertSeverity
  ): AuditSeverity {
    switch (severity) {
      case AlertSeverity.CRITICAL:
        return AuditSeverity.CRITICAL;
      case AlertSeverity.HIGH:
        return AuditSeverity.HIGH;
      case AlertSeverity.MEDIUM:
        return AuditSeverity.MEDIUM;
      case AlertSeverity.LOW:
        return AuditSeverity.LOW;
    }
  }
}
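A minimal usage sketch for the service above, assuming a `MonitoringConfig` instance is available (only the `alerting.*` and `retention.alertRetentionDays` fields are read by this class); the alert payload and IP address are hypothetical.

import { AlertManagementService } from "@/lib/services/AlertManagementService";
import { SecurityEventType } from "@/lib/securityAuditLogger";
import {
  AlertSeverity,
  AlertType,
  type MonitoringConfig,
} from "@/lib/securityMonitoring";

declare const config: MonitoringConfig; // assumed to be provided by the caller

const alerts = new AlertManagementService(config);

const payload = {
  severity: AlertSeverity.HIGH,
  type: AlertType.BRUTE_FORCE_ATTACK,
  title: "Brute Force Attack Detected",
  description: "12 failed login attempts from IP 203.0.113.7 in 5 minutes",
  eventType: SecurityEventType.AUTHENTICATION,
  context: { ipAddress: "203.0.113.7" },
  metadata: { failedAttempts: 12 },
};

const first = await alerts.createAlert(payload); // stored, logged, notified
const second = await alerts.createAlert(payload); // null: same type + IP inside the suppression window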
319  lib/services/ErrorHandlingService.ts  Normal file
@@ -0,0 +1,319 @@
/**
 * Centralized error handling service
 * Provides consistent error handling patterns across the application
 */

import { DATABASE, SCHEDULER } from "../constants";

export interface ErrorContext {
  operation: string;
  component: string;
  metadata?: Record<string, unknown>;
  userId?: string;
  companyId?: string;
}

export interface RetryConfig {
  maxAttempts: number;
  baseDelay: number;
  maxDelay: number;
  backoffMultiplier: number;
  jitter: boolean;
}

export class ErrorHandlingService {
  private static instance: ErrorHandlingService;

  private constructor() {}

  static getInstance(): ErrorHandlingService {
    if (!ErrorHandlingService.instance) {
      ErrorHandlingService.instance = new ErrorHandlingService();
    }
    return ErrorHandlingService.instance;
  }

  /**
   * Execute operation with standardized error handling and retry logic
   */
  async executeWithRetry<T>(
    operation: () => Promise<T>,
    context: ErrorContext,
    retryConfig: Partial<RetryConfig> = {}
  ): Promise<T> {
    const config: RetryConfig = {
      maxAttempts: DATABASE.MAX_RETRY_ATTEMPTS,
      baseDelay: DATABASE.RETRY_DELAY_BASE,
      maxDelay: DATABASE.RETRY_DELAY_MAX,
      backoffMultiplier: 2,
      jitter: true,
      ...retryConfig,
    };

    let lastError: Error;

    for (let attempt = 1; attempt <= config.maxAttempts; attempt++) {
      try {
        const result = await operation();

        // Log successful retry if previous attempts failed
        if (attempt > 1) {
          console.info(
            `${context.component}.${context.operation} succeeded on attempt ${attempt}`,
            {
              context,
              attempt,
              maxAttempts: config.maxAttempts,
            }
          );
        }

        return result;
      } catch (error) {
        lastError = error instanceof Error ? error : new Error(String(error));

        const isLastAttempt = attempt === config.maxAttempts;
        const shouldRetry = this.shouldRetry(
          lastError,
          attempt,
          config.maxAttempts
        );

        if (isLastAttempt || !shouldRetry) {
          this.logError(lastError, context, {
            attempt,
            maxAttempts: config.maxAttempts,
            finalFailure: true,
          });
          throw lastError;
        }

        // Log retry attempt
        this.logError(lastError, context, {
          attempt,
          maxAttempts: config.maxAttempts,
          willRetry: true,
        });

        // Wait before retry with exponential backoff and jitter
        const delay = this.calculateDelay(attempt, config);
        await this.sleep(delay);
      }
    }

    throw lastError!;
  }

  /**
   * Execute scheduler operation with standardized error handling
   */
  async executeSchedulerOperation<T>(
    operation: () => Promise<T>,
    schedulerName: string,
    operationName: string,
    metadata?: Record<string, unknown>
  ): Promise<T> {
    const context: ErrorContext = {
      operation: operationName,
      component: `scheduler.${schedulerName}`,
      metadata,
    };

    try {
      const startTime = Date.now();
      const result = await this.executeWithRetry(operation, context);
      const duration = Date.now() - startTime;

      // Log successful operation
      console.debug(
        `Scheduler operation completed: ${schedulerName}.${operationName}`,
        {
          duration,
          metadata,
        }
      );

      return result;
    } catch (error) {
      // Final error logging with enhanced context
      this.logSchedulerError(
        error as Error,
        schedulerName,
        operationName,
        metadata
      );
      throw error;
    }
  }

  /**
   * Execute API operation with timeout and error handling
   */
  async executeApiOperation<T>(
    operation: () => Promise<T>,
    apiName: string,
    operationName: string,
    timeoutMs: number = SCHEDULER.MAX_PROCESSING_TIME,
    metadata?: Record<string, unknown>
  ): Promise<T> {
    const context: ErrorContext = {
      operation: operationName,
      component: `api.${apiName}`,
      metadata,
    };

    return this.executeWithRetry(
      () => this.withTimeout(operation(), timeoutMs),
      context
    );
  }

  /**
   * Log error with consistent format and context
   */
  private logError(
    error: Error,
    context: ErrorContext,
    additionalInfo?: Record<string, unknown>
  ): void {
    const errorInfo = {
      message: error.message,
      stack: error.stack,
      name: error.name,
      context,
      timestamp: new Date().toISOString(),
      ...additionalInfo,
    };

    console.error(
      `Error in ${context.component}.${context.operation}:`,
      errorInfo
    );
  }

  /**
   * Log scheduler-specific errors with enhanced context
   */
  private logSchedulerError(
    error: Error,
    schedulerName: string,
    operationName: string,
    metadata?: Record<string, unknown>
  ): void {
    console.error(
      `Scheduler ${schedulerName} failed during ${operationName}:`,
      {
        error: {
          message: error.message,
          stack: error.stack,
          name: error.name,
        },
        scheduler: schedulerName,
        operation: operationName,
        metadata,
        timestamp: new Date().toISOString(),
        severity: "ERROR",
      }
    );
  }

  /**
   * Determine if error is retryable
   */
  private shouldRetry(
    error: Error,
    attempt: number,
    maxAttempts: number
  ): boolean {
    if (attempt >= maxAttempts) {
      return false;
    }

    // Don't retry certain types of errors
    const nonRetryableErrors = [
      "ValidationError",
      "AuthenticationError",
      "AuthorizationError",
      "NotFoundError",
      "BadRequestError",
    ];

    if (nonRetryableErrors.includes(error.name)) {
      return false;
    }

    // Don't retry if error message indicates non-retryable condition
    const nonRetryableMessages = [
      "invalid input",
      "unauthorized",
      "forbidden",
      "not found",
      "bad request",
    ];

    const errorMessage = error.message.toLowerCase();
    if (nonRetryableMessages.some((msg) => errorMessage.includes(msg))) {
      return false;
    }

    return true;
  }

  /**
   * Calculate delay with exponential backoff and jitter
   */
  private calculateDelay(attempt: number, config: RetryConfig): number {
    const exponentialDelay =
      config.baseDelay * config.backoffMultiplier ** (attempt - 1);
    const cappedDelay = Math.min(exponentialDelay, config.maxDelay);

    if (!config.jitter) {
      return cappedDelay;
    }

    // Add jitter: ±25% of the delay
    const jitterRange = cappedDelay * 0.25;
    const jitter = (Math.random() - 0.5) * 2 * jitterRange;

    return Math.max(0, cappedDelay + jitter);
  }

  /**
   * Add timeout to a promise
   */
  private withTimeout<T>(promise: Promise<T>, timeoutMs: number): Promise<T> {
    const timeoutPromise = new Promise<never>((_, reject) => {
      setTimeout(() => {
        reject(new Error(`Operation timed out after ${timeoutMs}ms`));
      }, timeoutMs);
    });

    return Promise.race([promise, timeoutPromise]);
  }

  /**
   * Sleep for specified duration
   */
  private sleep(ms: number): Promise<void> {
    return new Promise((resolve) => setTimeout(resolve, ms));
  }

  /**
   * Create error with enhanced context
   */
  createError(
    message: string,
    context: ErrorContext,
    originalError?: Error
  ): Error {
    const enhancedMessage = `${context.component}.${context.operation}: ${message}`;
    const error = new Error(enhancedMessage);

    if (originalError) {
      error.stack = originalError.stack;
      error.cause = originalError;
    }

    return error;
  }
}
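A sketch of `executeWithRetry` with an explicit retry config instead of the `DATABASE.*` defaults; the operation and context names are hypothetical. With `baseDelay` 500 ms and `backoffMultiplier` 2, the wait after failed attempt n is 500 * 2^(n-1) ms before jitter, so roughly 500 ms and then 1000 ms here, each capped at `maxDelay` and nudged by ±25% jitter.

import { ErrorHandlingService } from "@/lib/services/ErrorHandlingService";

const errors = ErrorHandlingService.getInstance();

let calls = 0;
const result = await errors.executeWithRetry(
  async () => {
    calls++;
    // Fails twice with a retryable error, then succeeds on attempt 3.
    if (calls < 3) throw new Error("temporary upstream failure");
    return "ok";
  },
  { operation: "fetchRates", component: "demo" }, // hypothetical names
  { maxAttempts: 3, baseDelay: 500, maxDelay: 5000, backoffMultiplier: 2, jitter: true }
);
// result === "ok"; the two failures are logged with willRetry: true.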
87  lib/services/SecurityEventProcessor.ts  Normal file
@@ -0,0 +1,87 @@
import { SECURITY_MONITORING, TIME } from "../constants";
import {
  type AuditLogContext,
  type AuditOutcome,
  AuditSeverity,
  type SecurityEventType,
} from "../securityAuditLogger";
import { BoundedBuffer } from "../utils/BoundedBuffer";

export interface SecurityEventData {
  timestamp: Date;
  eventType: SecurityEventType;
  context: AuditLogContext;
  outcome: AuditOutcome;
  severity: AuditSeverity;
}

/**
 * Handles security event processing and buffering
 * Single Responsibility: Event collection and storage
 */
export class SecurityEventProcessor {
  private eventBuffer: BoundedBuffer<SecurityEventData>;

  constructor() {
    this.eventBuffer = new BoundedBuffer<SecurityEventData>({
      maxSize: SECURITY_MONITORING.EVENT_BUFFER_MAX_SIZE,
      retentionTime: SECURITY_MONITORING.EVENT_RETENTION_HOURS * TIME.HOUR,
      cleanupThreshold: 0.9,
    });
  }

  /**
   * Add security event to buffer
   */
  addEvent(
    eventType: SecurityEventType,
    outcome: AuditOutcome,
    context: AuditLogContext,
    severity: AuditSeverity = AuditSeverity.INFO
  ): void {
    this.eventBuffer.push({
      timestamp: new Date(),
      eventType,
      context,
      outcome,
      severity,
    });
  }

  /**
   * Get events within time range
   */
  getEventsWithinTime(timeRangeMs: number): SecurityEventData[] {
    return this.eventBuffer.getWithinTime(timeRangeMs);
  }

  /**
   * Get recent events for analysis
   */
  getRecentEvents(): SecurityEventData[] {
    return this.eventBuffer.getWithinTime(
      SECURITY_MONITORING.THREAT_DETECTION_WINDOW
    );
  }

  /**
   * Manual cleanup of old events
   */
  cleanup(): void {
    this.eventBuffer.cleanup();
  }

  /**
   * Get current buffer statistics
   */
  getStats(): {
    bufferSize: number;
    eventsCount: number;
  } {
    const recentEvents = this.getRecentEvents();
    return {
      bufferSize: SECURITY_MONITORING.EVENT_BUFFER_MAX_SIZE,
      eventsCount: recentEvents.length,
    };
  }
}
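A usage sketch for the processor, assuming the `SECURITY_MONITORING` constants referenced above are defined in `lib/constants`; the event values are hypothetical.

import { SecurityEventProcessor } from "@/lib/services/SecurityEventProcessor";
import {
  AuditOutcome,
  AuditSeverity,
  SecurityEventType,
} from "@/lib/securityAuditLogger";

const processor = new SecurityEventProcessor();

// Buffer a failed login; the BoundedBuffer evicts by size and retention time.
processor.addEvent(
  SecurityEventType.AUTHENTICATION,
  AuditOutcome.FAILURE,
  { ipAddress: "198.51.100.4" },
  AuditSeverity.MEDIUM
);

// Events inside the configured threat-detection window.
const recent = processor.getRecentEvents();
console.log(processor.getStats()); // { bufferSize, eventsCount }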
338  lib/services/SecurityMetricsService.ts  Normal file
@@ -0,0 +1,338 @@
import { getSecurityAuditLogRepository } from "../repositories/RepositoryFactory";
import {
  AuditOutcome,
  AuditSeverity,
  SecurityEventType,
} from "../securityAuditLogger";
import {
  AlertType,
  type SecurityAlert,
  type SecurityMetrics,
  ThreatLevel,
} from "../securityMonitoring";

/**
 * Handles security metrics calculation and reporting
 * Single Responsibility: Metrics computation and data analysis
 */
export class SecurityMetricsService {
  /**
   * Calculate comprehensive security metrics for a time range
   */
  async calculateSecurityMetrics(
    timeRange: { start: Date; end: Date },
    companyId?: string,
    alerts: SecurityAlert[] = []
  ): Promise<SecurityMetrics> {
    const auditRepository = getSecurityAuditLogRepository();

    // Get security analytics using repository
    const analytics = await auditRepository.getSecurityAnalytics(
      timeRange.start,
      timeRange.end,
      companyId
    );

    // Get additional audit log data for user risk calculations
    const events = await auditRepository.findMany({
      where: {
        timestamp: {
          gte: timeRange.start,
          lte: timeRange.end,
        },
        ...(companyId && { companyId }),
      },
    });

    // Use analytics data from repository
    const totalEvents = analytics.totalEvents;
    const criticalEvents =
      analytics.eventsBySeverity[AuditSeverity.CRITICAL] || 0;

    const activeAlerts = alerts.filter((a) => !a.acknowledged).length;
    const resolvedAlerts = alerts.filter((a) => a.acknowledged).length;

    // Alert distribution by type
    const alertsByType = alerts.reduce(
      (acc, alert) => {
        acc[alert.type] = (acc[alert.type] || 0) + 1;
        return acc;
      },
      {} as Record<AlertType, number>
    );

    // Top threats from alerts
    const topThreats = Object.entries(alertsByType)
      .map(([type, count]) => ({ type: type as AlertType, count }))
      .sort((a, b) => b.count - a.count)
      .slice(0, 5);

    // User risk scores
    const userRiskScores = await this.calculateUserRiskScores(events);

    // Calculate overall security score
    const securityScore = this.calculateSecurityScore({
      totalEvents,
      criticalEvents,
      activeAlerts,
      topThreats,
    });

    // Determine threat level
    const threatLevel = this.determineThreatLevel(
      securityScore,
      activeAlerts,
      criticalEvents
    );

    return {
      totalEvents,
      criticalEvents,
      activeAlerts,
      resolvedAlerts,
      securityScore,
      threatLevel,
      eventsByType: analytics.eventsByType,
      alertsByType,
      topThreats,
      geoDistribution: analytics.geoDistribution,
      timeDistribution: analytics.hourlyDistribution,
      userRiskScores,
    };
  }

  /**
   * Calculate risk scores for users based on their security events
   */
  async calculateUserRiskScores(
    events: Array<{
      userId?: string;
      user?: { email: string };
      eventType: SecurityEventType;
      outcome: AuditOutcome;
      severity: AuditSeverity;
      country?: string;
    }>
  ): Promise<Array<{ userId: string; email: string; riskScore: number }>> {
    const userEvents = events.filter((e) => e.userId);
    const userScores = new Map<
      string,
      { email: string; score: number; events: typeof events }
    >();

    for (const event of userEvents) {
      if (!userScores.has(event.userId)) {
        userScores.set(event.userId, {
          email: event.user?.email || "unknown",
          score: 0,
          events: [],
        });
      }
      userScores.get(event.userId)?.events.push(event);
    }

    const riskScores: Array<{
      userId: string;
      email: string;
      riskScore: number;
    }> = [];

    for (const [userId, userData] of userScores) {
      let riskScore = 0;

      // Failed authentication attempts
      const failedAuth = userData.events.filter(
        (e) =>
          e.eventType === SecurityEventType.AUTHENTICATION &&
          e.outcome === AuditOutcome.FAILURE
      ).length;
      riskScore += failedAuth * 10;

      // Rate limit violations
      const rateLimited = userData.events.filter(
        (e) => e.outcome === AuditOutcome.RATE_LIMITED
      ).length;
      riskScore += rateLimited * 15;

      // Critical events
      const criticalEvents = userData.events.filter(
        (e) => e.severity === AuditSeverity.CRITICAL
      ).length;
      riskScore += criticalEvents * 25;

      // Multiple countries
      const countries = new Set(
        userData.events.map((e) => e.country).filter(Boolean)
      );
      if (countries.size > 2) riskScore += 20;

      // Normalize score to 0-100 range
      riskScore = Math.min(100, riskScore);

      riskScores.push({
        userId,
        email: userData.email,
        riskScore,
      });
    }

    return riskScores.sort((a, b) => b.riskScore - a.riskScore).slice(0, 10);
  }

  /**
   * Calculate threat level for a specific IP address
   */
  async calculateIPThreatLevel(ipAddress: string): Promise<{
    threatLevel: ThreatLevel;
    riskFactors: string[];
    recommendations: string[];
    isBlacklisted: boolean;
  }> {
    const auditRepository = getSecurityAuditLogRepository();

    // Get IP activity summary using repository
    const activitySummary = await auditRepository.getIPActivitySummary(
      ipAddress,
      24
    );

    const oneDayAgo = new Date(Date.now() - 24 * 60 * 60 * 1000);
    const _events = await auditRepository.findByIPAddress(ipAddress, oneDayAgo);

    const riskFactors: string[] = [];
    const recommendations: string[] = [];

    // Use activity summary data from repository
    const {
      failedLogins,
      rateLimitViolations,
      uniqueUsersTargeted,
      totalEvents,
    } = activitySummary;

    if (failedLogins > 10) {
      riskFactors.push(`${failedLogins} failed login attempts in 24h`);
      recommendations.push("Consider temporary IP blocking");
    }

    if (rateLimitViolations > 5) {
      riskFactors.push(`${rateLimitViolations} rate limit violations`);
      recommendations.push("Implement stricter rate limiting");
    }

    if (uniqueUsersTargeted > 5) {
      riskFactors.push(
        `Access attempts to ${uniqueUsersTargeted} different accounts`
      );
      recommendations.push("Investigate for account enumeration");
    }

    // Determine threat level
    let threatLevel = ThreatLevel.LOW;
    if (riskFactors.length >= 3) threatLevel = ThreatLevel.CRITICAL;
    else if (riskFactors.length >= 2) threatLevel = ThreatLevel.HIGH;
    else if (riskFactors.length >= 1) threatLevel = ThreatLevel.MODERATE;

    // Ensure we always provide at least basic analysis
    if (riskFactors.length === 0) {
      riskFactors.push(`${totalEvents} security events in 24h`);
    }

    if (recommendations.length === 0) {
      recommendations.push("Continue monitoring for suspicious activity");
    }

    // Simple blacklist check based on threat level and risk factors
    const isBlacklisted =
      threatLevel === ThreatLevel.CRITICAL && riskFactors.length >= 3;

    return { threatLevel, riskFactors, recommendations, isBlacklisted };
  }

  /**
   * Calculate overall security score based on various factors
   */
  private calculateSecurityScore(data: {
    totalEvents: number;
    criticalEvents: number;
    activeAlerts: number;
    topThreats: Array<{ type: AlertType; count: number }>;
  }): number {
    let score = 100;

    // Deduct points for critical events
    score -= Math.min(30, data.criticalEvents * 2);

    // Deduct points for active alerts
    score -= Math.min(25, data.activeAlerts * 3);

    // Deduct points for high-severity threats
    const highSeverityThreats = data.topThreats.filter((t) =>
      [
        AlertType.BRUTE_FORCE_ATTACK,
        AlertType.DATA_BREACH_ATTEMPT,
        AlertType.PRIVILEGE_ESCALATION,
      ].includes(t.type)
    );
    score -= Math.min(
      20,
      highSeverityThreats.reduce((sum, t) => sum + t.count, 0) * 5
    );

    // Deduct points for high event volume (potential attacks)
    if (data.totalEvents > 1000) {
      score -= Math.min(15, (data.totalEvents - 1000) / 100);
    }

    return Math.max(0, Math.round(score));
  }

  /**
   * Determine overall threat level based on security metrics
   */
  private determineThreatLevel(
    securityScore: number,
    activeAlerts: number,
    criticalEvents: number
  ): ThreatLevel {
    if (securityScore < 50 || activeAlerts >= 5 || criticalEvents >= 3) {
      return ThreatLevel.CRITICAL;
    }
    if (securityScore < 70 || activeAlerts >= 3 || criticalEvents >= 2) {
      return ThreatLevel.HIGH;
    }
    if (securityScore < 85 || activeAlerts >= 1 || criticalEvents >= 1) {
      return ThreatLevel.MODERATE;
    }
    return ThreatLevel.LOW;
  }

  /**
   * Get security score trend over time
   */
  async getSecurityScoreTrend(
    days: number,
    companyId?: string
  ): Promise<Array<{ date: Date; score: number }>> {
    const trends: Array<{ date: Date; score: number }> = [];
    const now = new Date();

    for (let i = days - 1; i >= 0; i--) {
      const date = new Date(now.getTime() - i * 24 * 60 * 60 * 1000);
      const startOfDay = new Date(date.setHours(0, 0, 0, 0));
      const endOfDay = new Date(date.setHours(23, 59, 59, 999));

      const metrics = await this.calculateSecurityMetrics(
        { start: startOfDay, end: endOfDay },
        companyId
      );

      trends.push({
        date: startOfDay,
        score: metrics.securityScore,
      });
    }

    return trends;
  }
}
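Working the weights in `calculateUserRiskScores` by hand: a user with 3 failed logins (3 × 10), 2 rate-limit hits (2 × 15), 1 critical event (25), and events from 3 countries (+20) totals 105, clamped to 100. A standalone re-derivation of that arithmetic, for illustration only:

// Mirrors the weights used in calculateUserRiskScores above.
function userRiskScore(input: {
  failedAuth: number;
  rateLimited: number;
  criticalEvents: number;
  distinctCountries: number;
}): number {
  let score =
    input.failedAuth * 10 + input.rateLimited * 15 + input.criticalEvents * 25;
  if (input.distinctCountries > 2) score += 20;
  return Math.min(100, score);
}

// 30 + 30 + 25 + 20 = 105 -> capped at 100
userRiskScore({ failedAuth: 3, rateLimited: 2, criticalEvents: 1, distinctCountries: 3 });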
316  lib/services/ThreatDetectionService.ts  Normal file
@@ -0,0 +1,316 @@
import { prisma } from "../prisma";
import {
  type AuditLogContext,
  AuditOutcome,
  SecurityEventType,
} from "../securityAuditLogger";
import {
  AlertSeverity,
  AlertType,
  type MonitoringConfig,
} from "../securityMonitoring";
import type { SecurityEventData } from "./SecurityEventProcessor";

export interface ThreatDetectionResult {
  threats: Array<{
    severity: AlertSeverity;
    type: AlertType;
    title: string;
    description: string;
    eventType: SecurityEventType;
    context: AuditLogContext;
    metadata: Record<string, unknown>;
  }>;
}

export interface AnomalyDetectionResult {
  isAnomaly: boolean;
  confidence: number;
  type: string;
  description: string;
  recommendedActions: string[];
}

/**
 * Handles security threat detection and anomaly analysis
 * Single Responsibility: Threat identification and risk assessment
 */
export class ThreatDetectionService {
  constructor(private config: MonitoringConfig) {}

  /**
   * Detect immediate threats from security event
   */
  async detectImmediateThreats(
    eventType: SecurityEventType,
    outcome: AuditOutcome,
    context: AuditLogContext,
    metadata?: Record<string, unknown>
  ): Promise<ThreatDetectionResult> {
    const threats: Array<{
      severity: AlertSeverity;
      type: AlertType;
      title: string;
      description: string;
      eventType: SecurityEventType;
      context: AuditLogContext;
      metadata: Record<string, unknown>;
    }> = [];

    const now = new Date();

    // Multiple failed logins detection
    if (
      eventType === SecurityEventType.AUTHENTICATION &&
      outcome === AuditOutcome.FAILURE &&
      context.ipAddress
    ) {
      const threatResult = await this.detectBruteForceAttack(
        context.ipAddress,
        now
      );
      if (threatResult) {
        threats.push({
          ...threatResult,
          eventType,
          context,
          metadata: { ...threatResult.metadata, ...metadata },
        });
      }
    }

    // Suspicious admin activity
    if (
      eventType === SecurityEventType.PLATFORM_ADMIN ||
      (eventType === SecurityEventType.USER_MANAGEMENT && context.userId)
    ) {
      const threatResult = await this.detectSuspiciousAdminActivity(
        context.userId!,
        now
      );
      if (threatResult) {
        threats.push({
          ...threatResult,
          eventType,
          context,
          metadata: { ...threatResult.metadata, ...metadata },
        });
      }
    }

    // Rate limiting violations
    if (outcome === AuditOutcome.RATE_LIMITED && context.ipAddress) {
      const threatResult = await this.detectRateLimitBreach(
        context.ipAddress,
        now
      );
      if (threatResult) {
        threats.push({
          ...threatResult,
          eventType,
          context,
          metadata: { ...threatResult.metadata, ...metadata },
        });
      }
    }

    return { threats };
  }

  /**
   * Detect anomalies in security events
   */
  async detectAnomalies(
    eventType: SecurityEventType,
    context: AuditLogContext,
    eventBuffer: SecurityEventData[]
  ): Promise<AnomalyDetectionResult> {
    const now = new Date();
    const sevenDaysAgo = new Date(now.getTime() - 7 * 24 * 60 * 60 * 1000);

    // Get historical data for baseline
    const historicalEvents = await prisma.securityAuditLog.findMany({
      where: {
        eventType,
        timestamp: { gte: sevenDaysAgo, lt: now },
      },
    });

    // Check for geographical anomalies
    if (context.country && context.userId) {
      const geoAnomaly = this.checkGeographicalAnomaly(
        context.userId,
        context.country,
        historicalEvents
      );
      if (geoAnomaly.isAnomaly) return geoAnomaly;
    }

    // Check for time-based anomalies
    const timeAnomaly = this.checkTemporalAnomaly(
      eventType,
      now,
      historicalEvents,
      eventBuffer
    );
    if (timeAnomaly.isAnomaly) return timeAnomaly;

    return {
      isAnomaly: false,
      confidence: 0,
      type: "normal",
      description: "No anomalies detected",
      recommendedActions: [],
    };
  }

  private async detectBruteForceAttack(ipAddress: string, now: Date) {
    const fiveMinutesAgo = new Date(now.getTime() - 5 * 60 * 1000);
    const recentFailures = await prisma.securityAuditLog.count({
      where: {
        eventType: SecurityEventType.AUTHENTICATION,
        outcome: AuditOutcome.FAILURE,
        ipAddress,
        timestamp: { gte: fiveMinutesAgo },
      },
    });

    if (recentFailures >= this.config.thresholds.failedLoginsPerMinute) {
      return {
        severity: AlertSeverity.HIGH,
        type: AlertType.BRUTE_FORCE_ATTACK,
        title: "Brute Force Attack Detected",
        description: `${recentFailures} failed login attempts from IP ${ipAddress} in 5 minutes`,
        metadata: { failedAttempts: recentFailures },
      };
    }
    return null;
  }

  private async detectSuspiciousAdminActivity(userId: string, now: Date) {
    const oneHourAgo = new Date(now.getTime() - 60 * 60 * 1000);
    const adminActions = await prisma.securityAuditLog.count({
      where: {
        userId,
        eventType: {
          in: [
            SecurityEventType.PLATFORM_ADMIN,
            SecurityEventType.USER_MANAGEMENT,
          ],
        },
        timestamp: { gte: oneHourAgo },
      },
    });

    if (adminActions >= this.config.thresholds.adminActionsPerHour) {
      return {
        severity: AlertSeverity.MEDIUM,
        type: AlertType.UNUSUAL_ADMIN_ACTIVITY,
        title: "Unusual Admin Activity",
        description: `User ${userId} performed ${adminActions} admin actions in 1 hour`,
        metadata: { adminActions },
      };
    }
    return null;
  }

  private async detectRateLimitBreach(ipAddress: string, now: Date) {
    const oneMinuteAgo = new Date(now.getTime() - 60 * 1000);
    const rateLimitViolations = await prisma.securityAuditLog.count({
      where: {
        outcome: AuditOutcome.RATE_LIMITED,
        ipAddress,
        timestamp: { gte: oneMinuteAgo },
      },
    });

    if (
      rateLimitViolations >= this.config.thresholds.rateLimitViolationsPerMinute
    ) {
      return {
        severity: AlertSeverity.MEDIUM,
        type: AlertType.RATE_LIMIT_BREACH,
        title: "Rate Limit Breach",
        description: `IP ${ipAddress} exceeded rate limits ${rateLimitViolations} times in 1 minute`,
        metadata: { violations: rateLimitViolations },
      };
    }
    return null;
  }

  private checkGeographicalAnomaly(
    userId: string,
    country: string,
    historicalEvents: Array<{ userId?: string; country?: string }>
  ): AnomalyDetectionResult {
    const userCountries = new Set(
      historicalEvents
        .filter((e) => e.userId === userId && e.country)
        .map((e) => e.country)
    );

    if (userCountries.size > 0 && !userCountries.has(country)) {
      return {
        isAnomaly: true,
        confidence: 0.8,
        type: "geographical_anomaly",
        description: `User accessing from unusual country: ${country}`,
        recommendedActions: [
          "Verify user identity",
          "Check for compromised credentials",
          "Consider additional authentication",
        ],
      };
    }

    return {
      isAnomaly: false,
      confidence: 0,
      type: "normal",
      description: "No geographical anomalies detected",
      recommendedActions: [],
    };
  }

  private checkTemporalAnomaly(
    eventType: SecurityEventType,
    now: Date,
    historicalEvents: Array<{ timestamp: Date }>,
    eventBuffer: SecurityEventData[]
  ): AnomalyDetectionResult {
    const currentHour = now.getHours();
    const hourlyEvents = historicalEvents.filter(
      (e) => e.timestamp.getHours() === currentHour
    );
    const avgHourlyEvents = hourlyEvents.length / 7; // 7 days average

    const recentHourEvents = eventBuffer.filter(
      (e) =>
        e.eventType === eventType &&
        e.timestamp.getHours() === currentHour &&
        e.timestamp > new Date(now.getTime() - 60 * 60 * 1000)
    ).length;

    if (recentHourEvents > avgHourlyEvents * 3 && avgHourlyEvents > 0) {
      return {
        isAnomaly: true,
        confidence: 0.7,
        type: "temporal_anomaly",
        description: `Unusual activity spike: ${recentHourEvents} events vs ${avgHourlyEvents.toFixed(1)} average`,
        recommendedActions: [
          "Investigate source of increased activity",
          "Check for automated attacks",
          "Review recent system changes",
        ],
      };
    }

    return {
      isAnomaly: false,
      confidence: 0,
      type: "normal",
      description: "No temporal anomalies detected",
      recommendedActions: [],
    };
  }
}
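A sketch of how the extracted services compose, assuming a shared `MonitoringConfig`; the event context is hypothetical and the wiring shown here is illustrative rather than the monitoring service's actual call path. Each detected threat is handed to `AlertManagementService.createAlert`, which applies duplicate suppression before logging and notifying.

import { AlertManagementService } from "@/lib/services/AlertManagementService";
import { ThreatDetectionService } from "@/lib/services/ThreatDetectionService";
import { AuditOutcome, SecurityEventType } from "@/lib/securityAuditLogger";
import type { MonitoringConfig } from "@/lib/securityMonitoring";

declare const config: MonitoringConfig; // assumed to be supplied by the monitoring service

const detector = new ThreatDetectionService(config);
const alerts = new AlertManagementService(config);

const { threats } = await detector.detectImmediateThreats(
  SecurityEventType.AUTHENTICATION,
  AuditOutcome.FAILURE,
  { ipAddress: "203.0.113.7" }
);

for (const threat of threats) {
  await alerts.createAlert(threat); // returns null if suppressed as a duplicate
}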
374  lib/services/schedulers/BaseSchedulerService.ts  Normal file
@@ -0,0 +1,374 @@
import { EventEmitter } from "node:events";
import cron from "node-cron";

/**
 * Scheduler status enumeration
 */
export enum SchedulerStatus {
  STOPPED = "STOPPED",
  STARTING = "STARTING",
  RUNNING = "RUNNING",
  PAUSED = "PAUSED",
  ERROR = "ERROR",
}

/**
 * Scheduler configuration interface
 */
export interface SchedulerConfig {
  enabled: boolean;
  interval: string;
  maxRetries: number;
  retryDelay: number;
  timeout: number;
}

/**
 * Scheduler metrics interface
 */
export interface SchedulerMetrics {
  totalRuns: number;
  successfulRuns: number;
  failedRuns: number;
  lastRunAt: Date | null;
  lastSuccessAt: Date | null;
  lastErrorAt: Date | null;
  averageRunTime: number;
  currentStatus: SchedulerStatus;
}

/**
 * Base abstract scheduler service class
 * Provides common functionality for all schedulers
 */
export abstract class BaseSchedulerService extends EventEmitter {
  protected cronJob?: cron.ScheduledTask;
  protected config: SchedulerConfig;
  protected status: SchedulerStatus = SchedulerStatus.STOPPED;
  protected metrics: SchedulerMetrics;
  protected isRunning = false;

  constructor(
    protected name: string,
    config: Partial<SchedulerConfig> = {}
  ) {
    super();

    this.config = {
      enabled: true,
      interval: "*/5 * * * *", // Default: every 5 minutes
      maxRetries: 3,
      retryDelay: 5000,
      timeout: 30000,
      ...config,
    };

    this.metrics = {
      totalRuns: 0,
      successfulRuns: 0,
      failedRuns: 0,
      lastRunAt: null,
      lastSuccessAt: null,
      lastErrorAt: null,
      averageRunTime: 0,
      currentStatus: this.status,
    };
  }

  /**
   * Abstract method that subclasses must implement
   * Contains the actual scheduler logic
   */
  protected abstract executeTask(): Promise<void>;

  /**
   * Start the scheduler
   */
  async start(): Promise<void> {
    if (!this.config.enabled) {
      console.log(`[${this.name}] Scheduler disabled via configuration`);
      return;
    }

    if (this.status === SchedulerStatus.RUNNING) {
      console.warn(`[${this.name}] Scheduler is already running`);
      return;
    }

    try {
      this.status = SchedulerStatus.STARTING;
      this.emit("statusChange", this.status);

      console.log(
        `[${this.name}] Starting scheduler with interval: ${this.config.interval}`
      );

      this.cronJob = cron.schedule(
        this.config.interval,
        () => this.runWithErrorHandling(),
        {
          scheduled: false, // Don't start immediately
          timezone: "UTC",
        }
      );

      this.cronJob.start();
      this.status = SchedulerStatus.RUNNING;
      this.metrics.currentStatus = this.status;
      this.emit("statusChange", this.status);
      this.emit("started");

      console.log(`[${this.name}] Scheduler started successfully`);
    } catch (error) {
      this.status = SchedulerStatus.ERROR;
      this.metrics.currentStatus = this.status;
      this.emit("statusChange", this.status);
      this.emit("error", error);
      throw error;
    }
  }

  /**
   * Stop the scheduler
   */
  async stop(): Promise<void> {
    if (this.status === SchedulerStatus.STOPPED) {
      console.warn(`[${this.name}] Scheduler is already stopped`);
      return;
    }

    try {
      console.log(`[${this.name}] Stopping scheduler...`);

      if (this.cronJob) {
        this.cronJob.stop();
        this.cronJob.destroy();
        this.cronJob = undefined;
      }

      // Wait for current execution to finish if running
      while (this.isRunning) {
        await new Promise((resolve) => setTimeout(resolve, 100));
      }

      this.status = SchedulerStatus.STOPPED;
      this.metrics.currentStatus = this.status;
      this.emit("statusChange", this.status);
      this.emit("stopped");

      console.log(`[${this.name}] Scheduler stopped successfully`);
    } catch (error) {
      this.status = SchedulerStatus.ERROR;
      this.metrics.currentStatus = this.status;
      this.emit("statusChange", this.status);
      this.emit("error", error);
      throw error;
    }
  }

  /**
   * Pause the scheduler
   */
  pause(): void {
    if (this.cronJob && this.status === SchedulerStatus.RUNNING) {
      this.cronJob.stop();
      this.status = SchedulerStatus.PAUSED;
      this.metrics.currentStatus = this.status;
      this.emit("statusChange", this.status);
      this.emit("paused");
      console.log(`[${this.name}] Scheduler paused`);
    }
  }

  /**
   * Resume the scheduler
   */
  resume(): void {
    if (this.cronJob && this.status === SchedulerStatus.PAUSED) {
      this.cronJob.start();
      this.status = SchedulerStatus.RUNNING;
      this.metrics.currentStatus = this.status;
      this.emit("statusChange", this.status);
      this.emit("resumed");
      console.log(`[${this.name}] Scheduler resumed`);
    }
  }

  /**
   * Get current scheduler status
   */
  getStatus(): SchedulerStatus {
    return this.status;
  }

  /**
   * Get scheduler metrics
   */
  getMetrics(): SchedulerMetrics {
    return { ...this.metrics };
  }

  /**
   * Get scheduler configuration
   */
  getConfig(): SchedulerConfig {
    return { ...this.config };
  }

  /**
   * Update scheduler configuration
   */
  updateConfig(newConfig: Partial<SchedulerConfig>): void {
    const wasRunning = this.status === SchedulerStatus.RUNNING;

    if (wasRunning) {
      this.pause();
    }

    this.config = { ...this.config, ...newConfig };

    if (wasRunning && newConfig.interval) {
      // Recreate cron job with new interval
      if (this.cronJob) {
        this.cronJob.destroy();
      }

      this.cronJob = cron.schedule(
        this.config.interval,
        () => this.runWithErrorHandling(),
        {
          scheduled: false,
          timezone: "UTC",
        }
      );
    }

    if (wasRunning) {
      this.resume();
    }

    this.emit("configUpdated", this.config);
  }

  /**
   * Manual trigger of the scheduler task
   */
  async trigger(): Promise<void> {
    if (this.isRunning) {
      throw new Error(`[${this.name}] Task is already running`);
    }

    await this.runWithErrorHandling();
  }

  /**
   * Get health status for load balancer/orchestrator
   */
  getHealthStatus(): {
    healthy: boolean;
    status: SchedulerStatus;
    lastSuccess: Date | null;
    consecutiveFailures: number;
  } {
    const consecutiveFailures = this.calculateConsecutiveFailures();
    const healthy =
      this.status === SchedulerStatus.RUNNING &&
      consecutiveFailures < this.config.maxRetries &&
      (!this.metrics.lastErrorAt ||
        !this.metrics.lastSuccessAt ||
        this.metrics.lastSuccessAt > this.metrics.lastErrorAt);

    return {
      healthy,
      status: this.status,
      lastSuccess: this.metrics.lastSuccessAt,
      consecutiveFailures,
    };
  }

  /**
   * Run the task with error handling and metrics collection
   */
  private async runWithErrorHandling(): Promise<void> {
    if (this.isRunning) {
      console.warn(
        `[${this.name}] Previous task still running, skipping this iteration`
      );
      return;
    }

    this.isRunning = true;
    const startTime = Date.now();

    try {
      this.metrics.totalRuns++;
      this.metrics.lastRunAt = new Date();
      this.emit("taskStarted");

      // Set timeout for task execution
      const timeoutPromise = new Promise<never>((_, reject) => {
        setTimeout(
          () => reject(new Error("Task timeout")),
          this.config.timeout
        );
      });

      await Promise.race([this.executeTask(), timeoutPromise]);

      const duration = Date.now() - startTime;
      this.updateRunTimeMetrics(duration);

      this.metrics.successfulRuns++;
      this.metrics.lastSuccessAt = new Date();
      this.emit("taskCompleted", { duration });
    } catch (error) {
      const duration = Date.now() - startTime;
      this.metrics.failedRuns++;
      this.metrics.lastErrorAt = new Date();

      console.error(`[${this.name}] Task failed:`, error);
      this.emit("taskFailed", { error, duration });

      // Check if we should retry
      const consecutiveFailures = this.calculateConsecutiveFailures();
      if (consecutiveFailures >= this.config.maxRetries) {
|
||||
this.status = SchedulerStatus.ERROR;
|
||||
this.metrics.currentStatus = this.status;
|
||||
this.emit("statusChange", this.status);
|
||||
console.error(
|
||||
`[${this.name}] Max retries exceeded, scheduler marked as ERROR`
|
||||
);
|
||||
}
|
||||
} finally {
|
||||
this.isRunning = false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Update average run time metrics
|
||||
*/
|
||||
private updateRunTimeMetrics(duration: number): void {
|
||||
if (this.metrics.averageRunTime === 0) {
|
||||
this.metrics.averageRunTime = duration;
|
||||
} else {
|
||||
// Calculate running average
|
||||
this.metrics.averageRunTime =
|
||||
(this.metrics.averageRunTime + duration) / 2;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate consecutive failures for health monitoring
|
||||
*/
|
||||
private calculateConsecutiveFailures(): number {
|
||||
// This is a simplified version - in production you might want to track
|
||||
// a rolling window of recent execution results
|
||||
if (!this.metrics.lastSuccessAt || !this.metrics.lastErrorAt) {
|
||||
return this.metrics.failedRuns;
|
||||
}
|
||||
|
||||
return this.metrics.lastErrorAt > this.metrics.lastSuccessAt
|
||||
? this.metrics.failedRuns - this.metrics.successfulRuns
|
||||
: 0;
|
||||
}
|
||||
}
|
||||
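For reference, a minimal sketch of a concrete scheduler built on this base class; the HeartbeatScheduler name and its config values are illustrative, not part of this commit:

// Illustrative only: a minimal subclass of BaseSchedulerService.
class HeartbeatScheduler extends BaseSchedulerService {
  constructor() {
    super("Heartbeat Scheduler", {
      enabled: true,
      interval: "* * * * *", // every minute (hypothetical)
      timeout: 10000,
      maxRetries: 3,
    });
  }

  protected async executeTask(): Promise<void> {
    console.log("heartbeat");
  }
}

const heartbeat = new HeartbeatScheduler();
heartbeat.on("taskCompleted", ({ duration }) =>
  console.log(`heartbeat took ${duration}ms`)
);
await heartbeat.start();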
317  lib/services/schedulers/CsvImportSchedulerService.ts  Normal file
@ -0,0 +1,317 @@
import { fetchAndParseCsv } from "../../csvFetcher";
import { prisma } from "../../prisma";
import {
  BaseSchedulerService,
  type SchedulerConfig,
} from "./BaseSchedulerService";

/**
 * CSV Import specific configuration
 */
export interface CsvImportSchedulerConfig extends SchedulerConfig {
  batchSize: number;
  maxConcurrentImports: number;
  skipDuplicateCheck: boolean;
}

/**
 * CSV Import scheduler service
 * Handles periodic CSV data import from companies
 */
export class CsvImportSchedulerService extends BaseSchedulerService {
  private csvConfig: CsvImportSchedulerConfig;

  constructor(config: Partial<CsvImportSchedulerConfig> = {}) {
    const defaultConfig = {
      interval: "*/10 * * * *", // Every 10 minutes
      timeout: 300000, // 5 minutes timeout
      batchSize: 10,
      maxConcurrentImports: 5,
      skipDuplicateCheck: false,
      ...config,
    };

    super("CSV Import Scheduler", defaultConfig);
    this.csvConfig = defaultConfig;
  }

  /**
   * Execute CSV import task
   */
  protected async executeTask(): Promise<void> {
    console.log(`[${this.name}] Starting CSV import batch processing...`);

    let totalProcessed = 0;
    let totalImported = 0;
    let totalErrors = 0;

    // Process companies in batches to avoid memory issues
    let skip = 0;
    let hasMore = true;

    while (hasMore) {
      const companies = await prisma.company.findMany({
        where: {
          status: "ACTIVE",
          csvUrl: { not: null }, // Only companies with CSV URLs
        },
        take: this.csvConfig.batchSize,
        skip: skip,
        orderBy: { createdAt: "asc" },
        select: {
          id: true,
          name: true,
          csvUrl: true,
          csvUsername: true,
          csvPassword: true,
        },
      });

      if (companies.length === 0) {
        hasMore = false;
        break;
      }

      totalProcessed += companies.length;

      // Process companies with controlled concurrency
      const results = await this.processBatchWithConcurrency(companies);

      results.forEach((result) => {
        if (result.success) {
          totalImported += result.importedCount || 0;
        } else {
          totalErrors++;
          console.error(
            `[${this.name}] Failed to process company ${result.companyId}:`,
            result.error
          );
        }
      });

      skip += this.csvConfig.batchSize;

      // Emit progress event
      this.emit("progress", {
        processed: totalProcessed,
        imported: totalImported,
        errors: totalErrors,
      });
    }

    console.log(
      `[${this.name}] Batch processing completed. ` +
        `Processed: ${totalProcessed}, Imported: ${totalImported}, Errors: ${totalErrors}`
    );

    // Emit completion metrics
    this.emit("batchCompleted", {
      totalProcessed,
      totalImported,
      totalErrors,
    });
  }

  /**
   * Process a batch of companies with controlled concurrency
   */
  private async processBatchWithConcurrency(
    companies: Array<{
      id: string;
      name: string;
      csvUrl: string | null;
      csvUsername: string | null;
      csvPassword: string | null;
    }>
  ): Promise<
    Array<{
      companyId: string;
      success: boolean;
      importedCount?: number;
      error?: Error;
    }>
  > {
    const results: Array<{
      companyId: string;
      success: boolean;
      importedCount?: number;
      error?: Error;
    }> = [];

    // Process companies in chunks to control concurrency
    const chunkSize = this.csvConfig.maxConcurrentImports;
    for (let i = 0; i < companies.length; i += chunkSize) {
      const chunk = companies.slice(i, i + chunkSize);

      const chunkResults = await Promise.allSettled(
        chunk.map((company) => this.processCompanyImport(company))
      );

      chunkResults.forEach((result, index) => {
        const company = chunk[index];
        if (result.status === "fulfilled") {
          results.push({
            companyId: company.id,
            success: true,
            importedCount: result.value,
          });
        } else {
          results.push({
            companyId: company.id,
            success: false,
            error: result.reason,
          });
        }
      });
    }

    return results;
  }

  /**
   * Process CSV import for a single company
   */
  private async processCompanyImport(company: {
    id: string;
    name: string;
    csvUrl: string | null;
    csvUsername: string | null;
    csvPassword: string | null;
  }): Promise<number> {
    if (!company.csvUrl) {
      throw new Error(`Company ${company.name} has no CSV URL configured`);
    }

    console.log(
      `[${this.name}] Processing CSV import for company: ${company.name}`
    );

    try {
      // Fetch and parse CSV data
      const rawSessionData = await fetchAndParseCsv(
        company.csvUrl,
        company.csvUsername || undefined,
        company.csvPassword || undefined
      );

      let importedCount = 0;

      // Create SessionImport records for new data
      for (const rawSession of rawSessionData) {
        try {
          // Check for duplicates if not skipping
          if (!this.csvConfig.skipDuplicateCheck) {
            const existing = await prisma.sessionImport.findFirst({
              where: {
                companyId: company.id,
                externalId: rawSession.externalId,
              },
            });

            if (existing) {
              console.log(
                `[${this.name}] Skipping duplicate session: ${rawSession.externalId} for company: ${company.name}`
              );
              continue;
            }
          }

          // Create new session import record
          await prisma.sessionImport.create({
            data: {
              companyId: company.id,
              externalId: rawSession.externalId,
              csvData: rawSession.csvData,
              status: "PENDING_PROCESSING",
              metadata: {
                importedAt: new Date().toISOString(),
                csvUrl: company.csvUrl,
                batchId: `batch_${Date.now()}`,
              },
            },
          });

          importedCount++;
        } catch (sessionError) {
          console.error(
            `[${this.name}] Failed to import session ${rawSession.externalId} for company ${company.name}:`,
            sessionError
          );
          // Continue with other sessions
        }
      }

      console.log(
        `[${this.name}] Successfully imported ${importedCount} sessions for company: ${company.name}`
      );

      return importedCount;
    } catch (error) {
      console.error(
        `[${this.name}] Failed to process CSV import for company ${company.name}:`,
        error
      );
      throw error;
    }
  }

  /**
   * Get CSV import specific metrics
   */
  getCsvImportMetrics(): {
    totalCompaniesProcessed: number;
    totalSessionsImported: number;
    averageImportTime: number;
    errorRate: number;
  } {
    const baseMetrics = this.getMetrics();

    // These would be enhanced with actual tracking in a production system
    return {
      totalCompaniesProcessed: baseMetrics.successfulRuns,
      totalSessionsImported: 0, // Would track actual import counts
      averageImportTime: baseMetrics.averageRunTime,
      errorRate:
        baseMetrics.totalRuns > 0
          ? baseMetrics.failedRuns / baseMetrics.totalRuns
          : 0,
    };
  }

  /**
   * Trigger import for a specific company
   */
  async triggerCompanyImport(companyId: string): Promise<number> {
    const company = await prisma.company.findUnique({
      where: { id: companyId },
      select: {
        id: true,
        name: true,
        csvUrl: true,
        csvUsername: true,
        csvPassword: true,
      },
    });

    if (!company) {
      throw new Error(`Company with ID ${companyId} not found`);
    }

    return this.processCompanyImport(company);
  }

  /**
   * Update CSV-specific configuration
   */
  updateCsvConfig(newConfig: Partial<CsvImportSchedulerConfig>): void {
    this.csvConfig = { ...this.csvConfig, ...newConfig };
    this.updateConfig(newConfig);
  }

  /**
   * Get CSV-specific configuration
   */
  getCsvConfig(): CsvImportSchedulerConfig {
    return { ...this.csvConfig };
  }
}
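As a usage sketch (assuming the service is instantiated directly rather than through the manager), a one-off import for a single company can be run without starting the cron loop; the companyId value is a placeholder:

// Sketch: run a single company's import on demand.
const csvScheduler = new CsvImportSchedulerService({ batchSize: 20 });
const imported = await csvScheduler.triggerCompanyImport("company-id-placeholder");
console.log(`Imported ${imported} sessions`);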
422  lib/services/schedulers/SchedulerManager.ts  Normal file
@ -0,0 +1,422 @@
import { EventEmitter } from "node:events";
import {
  type BaseSchedulerService,
  SchedulerStatus,
} from "./BaseSchedulerService";
import { CsvImportSchedulerService } from "./CsvImportSchedulerService";

/**
 * Scheduler manager configuration
 */
export interface SchedulerManagerConfig {
  enabled: boolean;
  autoRestart: boolean;
  healthCheckInterval: number;
  maxRestartAttempts: number;
  restartDelay: number;
}

/**
 * Scheduler registration interface
 */
export interface SchedulerRegistration {
  id: string;
  name: string;
  service: BaseSchedulerService;
  autoStart: boolean;
  critical: boolean; // If true, manager will try to restart on failure
}

/**
 * Manager health status
 */
export interface ManagerHealthStatus {
  healthy: boolean;
  totalSchedulers: number;
  runningSchedulers: number;
  errorSchedulers: number;
  schedulerStatuses: Record<
    string,
    {
      status: SchedulerStatus;
      healthy: boolean;
      lastSuccess: Date | null;
    }
  >;
}

/**
 * Scheduler Manager
 * Orchestrates multiple scheduler services for horizontal scaling
 */
export class SchedulerManager extends EventEmitter {
  private schedulers = new Map<string, SchedulerRegistration>();
  private config: SchedulerManagerConfig;
  private healthCheckTimer?: NodeJS.Timeout;
  private restartAttempts = new Map<string, number>();

  constructor(config: Partial<SchedulerManagerConfig> = {}) {
    super();

    this.config = {
      enabled: true,
      autoRestart: true,
      healthCheckInterval: 30000, // 30 seconds
      maxRestartAttempts: 3,
      restartDelay: 5000, // 5 seconds
      ...config,
    };
  }

  /**
   * Register a scheduler service
   */
  registerScheduler(registration: SchedulerRegistration): void {
    if (this.schedulers.has(registration.id)) {
      throw new Error(
        `Scheduler with ID ${registration.id} is already registered`
      );
    }

    // Set up event listeners for the scheduler
    this.setupSchedulerEventListeners(registration);

    this.schedulers.set(registration.id, registration);
    this.restartAttempts.set(registration.id, 0);

    console.log(
      `[Scheduler Manager] Registered scheduler: ${registration.name}`
    );
    this.emit("schedulerRegistered", registration);
  }

  /**
   * Unregister a scheduler service
   */
  async unregisterScheduler(schedulerId: string): Promise<void> {
    const registration = this.schedulers.get(schedulerId);
    if (!registration) {
      throw new Error(`Scheduler with ID ${schedulerId} is not registered`);
    }

    // Stop the scheduler if running
    if (registration.service.getStatus() === SchedulerStatus.RUNNING) {
      await registration.service.stop();
    }

    // Remove event listeners
    registration.service.removeAllListeners();

    this.schedulers.delete(schedulerId);
    this.restartAttempts.delete(schedulerId);

    console.log(
      `[Scheduler Manager] Unregistered scheduler: ${registration.name}`
    );
    this.emit("schedulerUnregistered", registration);
  }

  /**
   * Start all registered schedulers
   */
  async startAll(): Promise<void> {
    if (!this.config.enabled) {
      console.log("[Scheduler Manager] Disabled via configuration");
      return;
    }

    console.log("[Scheduler Manager] Starting all schedulers...");

    const startPromises = Array.from(this.schedulers.values())
      .filter((reg) => reg.autoStart)
      .map(async (registration) => {
        try {
          await registration.service.start();
          console.log(`[Scheduler Manager] Started: ${registration.name}`);
        } catch (error) {
          console.error(
            `[Scheduler Manager] Failed to start ${registration.name}:`,
            error
          );
          this.emit("schedulerStartFailed", { registration, error });
        }
      });

    await Promise.allSettled(startPromises);

    // Start health monitoring
    this.startHealthMonitoring();

    console.log("[Scheduler Manager] All schedulers started");
    this.emit("allSchedulersStarted");
  }

  /**
   * Stop all registered schedulers
   */
  async stopAll(): Promise<void> {
    console.log("[Scheduler Manager] Stopping all schedulers...");

    // Stop health monitoring
    this.stopHealthMonitoring();

    const stopPromises = Array.from(this.schedulers.values()).map(
      async (registration) => {
        try {
          await registration.service.stop();
          console.log(`[Scheduler Manager] Stopped: ${registration.name}`);
        } catch (error) {
          console.error(
            `[Scheduler Manager] Failed to stop ${registration.name}:`,
            error
          );
        }
      }
    );

    await Promise.allSettled(stopPromises);

    console.log("[Scheduler Manager] All schedulers stopped");
    this.emit("allSchedulersStopped");
  }

  /**
   * Start a specific scheduler
   */
  async startScheduler(schedulerId: string): Promise<void> {
    const registration = this.schedulers.get(schedulerId);
    if (!registration) {
      throw new Error(`Scheduler with ID ${schedulerId} is not registered`);
    }

    await registration.service.start();
    this.emit("schedulerStarted", registration);
  }

  /**
   * Stop a specific scheduler
   */
  async stopScheduler(schedulerId: string): Promise<void> {
    const registration = this.schedulers.get(schedulerId);
    if (!registration) {
      throw new Error(`Scheduler with ID ${schedulerId} is not registered`);
    }

    await registration.service.stop();
    this.emit("schedulerStopped", registration);
  }

  /**
   * Get health status of all schedulers
   */
  getHealthStatus(): ManagerHealthStatus {
    const schedulerStatuses: Record<
      string,
      {
        status: SchedulerStatus;
        healthy: boolean;
        lastSuccess: Date | null;
      }
    > = {};

    let runningCount = 0;
    let errorCount = 0;

    for (const [id, registration] of this.schedulers) {
      const health = registration.service.getHealthStatus();
      const status = registration.service.getStatus();

      schedulerStatuses[id] = {
        status,
        healthy: health.healthy,
        lastSuccess: health.lastSuccess,
      };

      if (status === SchedulerStatus.RUNNING) runningCount++;
      if (status === SchedulerStatus.ERROR) errorCount++;
    }

    const totalSchedulers = this.schedulers.size;
    const healthy = errorCount === 0 && runningCount > 0;

    return {
      healthy,
      totalSchedulers,
      runningSchedulers: runningCount,
      errorSchedulers: errorCount,
      schedulerStatuses,
    };
  }

  /**
   * Get all registered schedulers
   */
  getSchedulers(): Array<{
    id: string;
    name: string;
    status: SchedulerStatus;
    metrics: any;
  }> {
    return Array.from(this.schedulers.entries()).map(([id, registration]) => ({
      id,
      name: registration.name,
      status: registration.service.getStatus(),
      metrics: registration.service.getMetrics(),
    }));
  }

  /**
   * Get a specific scheduler
   */
  getScheduler(schedulerId: string): BaseSchedulerService | null {
    const registration = this.schedulers.get(schedulerId);
    return registration ? registration.service : null;
  }

  /**
   * Trigger manual execution of a specific scheduler
   */
  async triggerScheduler(schedulerId: string): Promise<void> {
    const registration = this.schedulers.get(schedulerId);
    if (!registration) {
      throw new Error(`Scheduler with ID ${schedulerId} is not registered`);
    }

    await registration.service.trigger();
    this.emit("schedulerTriggered", registration);
  }

  /**
   * Setup event listeners for a scheduler
   */
  private setupSchedulerEventListeners(
    registration: SchedulerRegistration
  ): void {
    const { service } = registration;

    service.on("statusChange", (status: SchedulerStatus) => {
      this.emit("schedulerStatusChanged", { registration, status });

      // Handle automatic restart for critical schedulers
      if (
        status === SchedulerStatus.ERROR &&
        registration.critical &&
        this.config.autoRestart
      ) {
        this.handleSchedulerFailure(registration);
      }
    });

    service.on("taskCompleted", (data) => {
      this.emit("schedulerTaskCompleted", { registration, data });
      // Reset restart attempts on successful completion
      this.restartAttempts.set(registration.id, 0);
    });

    service.on("taskFailed", (data) => {
      this.emit("schedulerTaskFailed", { registration, data });
    });

    service.on("error", (error) => {
      this.emit("schedulerError", { registration, error });
    });
  }

  /**
   * Handle scheduler failure with automatic restart
   */
  private async handleSchedulerFailure(
    registration: SchedulerRegistration
  ): Promise<void> {
    const attempts = this.restartAttempts.get(registration.id) || 0;

    if (attempts >= this.config.maxRestartAttempts) {
      console.error(
        `[Scheduler Manager] Max restart attempts exceeded for ${registration.name}`
      );
      this.emit("schedulerRestartFailed", registration);
      return;
    }

    console.log(
      `[Scheduler Manager] Attempting to restart ${registration.name} (attempt ${attempts + 1})`
    );

    // Wait before restart
    await new Promise((resolve) =>
      setTimeout(resolve, this.config.restartDelay)
    );

    try {
      await registration.service.stop();
      await registration.service.start();

      console.log(
        `[Scheduler Manager] Successfully restarted ${registration.name}`
      );
      this.emit("schedulerRestarted", registration);
    } catch (error) {
      console.error(
        `[Scheduler Manager] Failed to restart ${registration.name}:`,
        error
      );
      this.restartAttempts.set(registration.id, attempts + 1);
      this.emit("schedulerRestartError", { registration, error });
    }
  }

  /**
   * Start health monitoring
   */
  private startHealthMonitoring(): void {
    if (this.healthCheckTimer) return;

    this.healthCheckTimer = setInterval(() => {
      const health = this.getHealthStatus();
      this.emit("healthCheck", health);

      if (!health.healthy) {
        console.warn("[Scheduler Manager] Health check failed:", health);
      }
    }, this.config.healthCheckInterval);
  }

  /**
   * Stop health monitoring
   */
  private stopHealthMonitoring(): void {
    if (this.healthCheckTimer) {
      clearInterval(this.healthCheckTimer);
      this.healthCheckTimer = undefined;
    }
  }

  /**
   * Create and register default schedulers
   */
  static createDefaultSchedulers(): SchedulerManager {
    const manager = new SchedulerManager();

    // Register CSV Import Scheduler
    manager.registerScheduler({
      id: "csv-import",
      name: "CSV Import Scheduler",
      service: new CsvImportSchedulerService({
        interval: "*/10 * * * *", // Every 10 minutes
      }),
      autoStart: true,
      critical: true,
    });

    // Additional schedulers would be registered here
    // manager.registerScheduler({
    //   id: "processing",
    //   name: "Session Processing Scheduler",
    //   service: new SessionProcessingSchedulerService(),
    //   autoStart: true,
    //   critical: true,
    // });

    return manager;
  }
}
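A sketch of how the manager might be driven directly; the event wiring mirrors the events emitted above:

// Sketch: stand up the default scheduler set and observe health checks.
const manager = SchedulerManager.createDefaultSchedulers();

manager.on("healthCheck", (health) => {
  if (!health.healthy) {
    console.warn(`degraded: ${health.errorSchedulers} scheduler(s) in ERROR`);
  }
});

await manager.startAll();

// Later, e.g. on shutdown:
await manager.stopAll();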
274  lib/services/schedulers/ServerSchedulerIntegration.ts  Normal file
@ -0,0 +1,274 @@
import { getSchedulerConfig } from "../../env";
import { CsvImportSchedulerService } from "./CsvImportSchedulerService";
import { SchedulerManager } from "./SchedulerManager";

/**
 * Server-side scheduler integration
 * Manages all schedulers for the application server
 */
export class ServerSchedulerIntegration {
  private static instance: ServerSchedulerIntegration;
  private manager: SchedulerManager;
  private isInitialized = false;

  private constructor() {
    this.manager = new SchedulerManager({
      enabled: true,
      autoRestart: true,
      healthCheckInterval: 30000,
      maxRestartAttempts: 3,
      restartDelay: 5000,
    });

    this.setupManagerEventListeners();
  }

  /**
   * Get singleton instance
   */
  static getInstance(): ServerSchedulerIntegration {
    if (!ServerSchedulerIntegration.instance) {
      ServerSchedulerIntegration.instance = new ServerSchedulerIntegration();
    }
    return ServerSchedulerIntegration.instance;
  }

  /**
   * Initialize schedulers based on environment configuration
   */
  async initialize(): Promise<void> {
    if (this.isInitialized) {
      console.warn("[Server Scheduler Integration] Already initialized");
      return;
    }

    const config = getSchedulerConfig();

    if (!config.enabled) {
      console.log(
        "[Server Scheduler Integration] Schedulers disabled via configuration"
      );
      return;
    }

    try {
      console.log("[Server Scheduler Integration] Initializing schedulers...");

      // Register CSV Import Scheduler
      this.manager.registerScheduler({
        id: "csv-import",
        name: "CSV Import Scheduler",
        service: new CsvImportSchedulerService({
          enabled: config.csvImport.enabled,
          interval: config.csvImport.interval,
          timeout: 300000, // 5 minutes
          batchSize: 10,
          maxConcurrentImports: 5,
        }),
        autoStart: true,
        critical: true,
      });

      // TODO: Add other schedulers when they are converted
      // this.manager.registerScheduler({
      //   id: "import-processing",
      //   name: "Import Processing Scheduler",
      //   service: new ImportProcessingSchedulerService({
      //     enabled: config.importProcessing.enabled,
      //     interval: config.importProcessing.interval,
      //   }),
      //   autoStart: true,
      //   critical: true,
      // });

      // this.manager.registerScheduler({
      //   id: "session-processing",
      //   name: "Session Processing Scheduler",
      //   service: new SessionProcessingSchedulerService({
      //     enabled: config.sessionProcessing.enabled,
      //     interval: config.sessionProcessing.interval,
      //   }),
      //   autoStart: true,
      //   critical: true,
      // });

      // this.manager.registerScheduler({
      //   id: "batch-processing",
      //   name: "Batch Processing Scheduler",
      //   service: new BatchProcessingSchedulerService({
      //     enabled: config.batchProcessing.enabled,
      //     interval: config.batchProcessing.interval,
      //   }),
      //   autoStart: true,
      //   critical: true,
      // });

      // Start all registered schedulers
      await this.manager.startAll();

      this.isInitialized = true;
      console.log(
        "[Server Scheduler Integration] All schedulers initialized successfully"
      );
    } catch (error) {
      console.error(
        "[Server Scheduler Integration] Failed to initialize schedulers:",
        error
      );
      throw error;
    }
  }

  /**
   * Shutdown all schedulers
   */
  async shutdown(): Promise<void> {
    if (!this.isInitialized) {
      console.warn("[Server Scheduler Integration] Not initialized");
      return;
    }

    try {
      console.log("[Server Scheduler Integration] Shutting down schedulers...");
      await this.manager.stopAll();
      this.isInitialized = false;
      console.log("[Server Scheduler Integration] All schedulers stopped");
    } catch (error) {
      console.error(
        "[Server Scheduler Integration] Error during shutdown:",
        error
      );
      throw error;
    }
  }

  /**
   * Get scheduler manager for external access
   */
  getManager(): SchedulerManager {
    return this.manager;
  }

  /**
   * Get health status of all schedulers
   */
  getHealthStatus() {
    return this.manager.getHealthStatus();
  }

  /**
   * Get list of all schedulers with their status
   */
  getSchedulersList() {
    return this.manager.getSchedulers();
  }

  /**
   * Trigger manual execution of a specific scheduler
   */
  async triggerScheduler(schedulerId: string): Promise<void> {
    return this.manager.triggerScheduler(schedulerId);
  }

  /**
   * Start a specific scheduler
   */
  async startScheduler(schedulerId: string): Promise<void> {
    return this.manager.startScheduler(schedulerId);
  }

  /**
   * Stop a specific scheduler
   */
  async stopScheduler(schedulerId: string): Promise<void> {
    return this.manager.stopScheduler(schedulerId);
  }

  /**
   * Setup event listeners for the manager
   */
  private setupManagerEventListeners(): void {
    this.manager.on("schedulerStatusChanged", ({ registration, status }) => {
      console.log(
        `[Server Scheduler Integration] ${registration.name} status changed to: ${status}`
      );
    });

    this.manager.on("schedulerTaskCompleted", ({ registration, data }) => {
      console.log(
        `[Server Scheduler Integration] ${registration.name} task completed in ${data.duration}ms`
      );
    });

    this.manager.on("schedulerTaskFailed", ({ registration, data }) => {
      console.error(
        `[Server Scheduler Integration] ${registration.name} task failed:`,
        data.error
      );
    });

    this.manager.on("schedulerRestarted", (registration) => {
      console.log(
        `[Server Scheduler Integration] Successfully restarted: ${registration.name}`
      );
    });

    this.manager.on("schedulerRestartFailed", (registration) => {
      console.error(
        `[Server Scheduler Integration] Failed to restart: ${registration.name}`
      );
    });

    this.manager.on("healthCheck", (health) => {
      if (!health.healthy) {
        console.warn("[Server Scheduler Integration] Health check failed:", {
          totalSchedulers: health.totalSchedulers,
          runningSchedulers: health.runningSchedulers,
          errorSchedulers: health.errorSchedulers,
        });
      }
    });
  }

  /**
   * Handle graceful shutdown
   */
  async handleGracefulShutdown(): Promise<void> {
    console.log(
      "[Server Scheduler Integration] Received shutdown signal, stopping schedulers..."
    );

    try {
      await this.shutdown();
      console.log("[Server Scheduler Integration] Graceful shutdown completed");
    } catch (error) {
      console.error(
        "[Server Scheduler Integration] Error during graceful shutdown:",
        error
      );
      process.exit(1);
    }
  }
}

/**
 * Convenience function to get the scheduler integration instance
 */
export const getSchedulerIntegration = () =>
  ServerSchedulerIntegration.getInstance();

/**
 * Initialize schedulers for server startup
 */
export const initializeSchedulers = async (): Promise<void> => {
  const integration = getSchedulerIntegration();
  await integration.initialize();
};

/**
 * Shutdown schedulers for server shutdown
 */
export const shutdownSchedulers = async (): Promise<void> => {
  const integration = getSchedulerIntegration();
  await integration.shutdown();
};
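A sketch of how these helpers could be wired into server startup; the register() hook shown is an assumption about the host application, not part of this commit:

// Sketch (hypothetical wiring): start schedulers with the server and
// stop them on shutdown signals.
import {
  getSchedulerIntegration,
  initializeSchedulers,
} from "@/lib/services/schedulers/ServerSchedulerIntegration";

export async function register(): Promise<void> {
  await initializeSchedulers();

  const integration = getSchedulerIntegration();
  process.on("SIGTERM", () => integration.handleGracefulShutdown());
  process.on("SIGINT", () => integration.handleGracefulShutdown());
}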
272  lib/services/schedulers/StandaloneSchedulerRunner.ts  Normal file
@ -0,0 +1,272 @@
#!/usr/bin/env node

/**
 * Standalone Scheduler Runner
 * Runs individual schedulers as separate processes for horizontal scaling
 *
 * Usage:
 *   npx tsx lib/services/schedulers/StandaloneSchedulerRunner.ts --scheduler=csv-import
 *   npx tsx lib/services/schedulers/StandaloneSchedulerRunner.ts --scheduler=session-processing
 */

import { Command } from "commander";
import { validateEnv } from "../../env";
import {
  type BaseSchedulerService,
  SchedulerStatus,
} from "./BaseSchedulerService";
import { CsvImportSchedulerService } from "./CsvImportSchedulerService";

interface SchedulerFactory {
  [key: string]: () => BaseSchedulerService;
}

/**
 * Available schedulers for standalone execution
 */
const AVAILABLE_SCHEDULERS: SchedulerFactory = {
  "csv-import": () =>
    new CsvImportSchedulerService({
      interval: process.env.CSV_IMPORT_INTERVAL || "*/10 * * * *",
      timeout: Number.parseInt(process.env.CSV_IMPORT_TIMEOUT || "300000"),
      batchSize: Number.parseInt(process.env.CSV_IMPORT_BATCH_SIZE || "10"),
      maxConcurrentImports: Number.parseInt(
        process.env.CSV_IMPORT_MAX_CONCURRENT || "5"
      ),
    }),

  // Additional schedulers would be added here:
  // "import-processing": () => new ImportProcessingSchedulerService({
  //   interval: process.env.IMPORT_PROCESSING_INTERVAL || "*/2 * * * *",
  // }),
  // "session-processing": () => new SessionProcessingSchedulerService({
  //   interval: process.env.SESSION_PROCESSING_INTERVAL || "*/5 * * * *",
  // }),
  // "batch-processing": () => new BatchProcessingSchedulerService({
  //   interval: process.env.BATCH_PROCESSING_INTERVAL || "*/5 * * * *",
  // }),
};

/**
 * Standalone Scheduler Runner Class
 */
class StandaloneSchedulerRunner {
  private scheduler?: BaseSchedulerService;
  private isShuttingDown = false;

  constructor(private schedulerName: string) {}

  /**
   * Run the specified scheduler
   */
  async run(): Promise<void> {
    try {
      // Validate environment
      const envValidation = validateEnv();
      if (!envValidation.valid) {
        console.error(
          "[Standalone Scheduler] Environment validation errors:",
          envValidation.errors
        );
        process.exit(1);
      }

      // Create scheduler instance
      const factory = AVAILABLE_SCHEDULERS[this.schedulerName];
      if (!factory) {
        console.error(
          `[Standalone Scheduler] Unknown scheduler: ${this.schedulerName}`
        );
        console.error(
          `Available schedulers: ${Object.keys(AVAILABLE_SCHEDULERS).join(", ")}`
        );
        process.exit(1);
      }

      this.scheduler = factory();

      // Setup event listeners
      this.setupEventListeners();

      // Setup graceful shutdown
      this.setupGracefulShutdown();

      console.log(`[Standalone Scheduler] Starting ${this.schedulerName}...`);

      // Start the scheduler
      await this.scheduler.start();

      console.log(`[Standalone Scheduler] ${this.schedulerName} is running`);

      // Keep the process alive
      this.keepAlive();
    } catch (error) {
      console.error(
        `[Standalone Scheduler] Failed to start ${this.schedulerName}:`,
        error
      );
      process.exit(1);
    }
  }

  /**
   * Setup event listeners for the scheduler
   */
  private setupEventListeners(): void {
    if (!this.scheduler) return;

    this.scheduler.on("statusChange", (status: SchedulerStatus) => {
      console.log(`[Standalone Scheduler] Status changed to: ${status}`);

      if (status === SchedulerStatus.ERROR && !this.isShuttingDown) {
        console.error(
          "[Standalone Scheduler] Scheduler entered ERROR state, exiting..."
        );
        process.exit(1);
      }
    });

    this.scheduler.on("taskCompleted", (data) => {
      console.log(
        `[Standalone Scheduler] Task completed in ${data.duration}ms`
      );
    });

    this.scheduler.on("taskFailed", (data) => {
      console.error(
        "[Standalone Scheduler] Task failed:",
        data.error?.message || data.error
      );
    });

    this.scheduler.on("started", () => {
      console.log(
        `[Standalone Scheduler] ${this.schedulerName} started successfully`
      );
    });

    this.scheduler.on("stopped", () => {
      console.log(`[Standalone Scheduler] ${this.schedulerName} stopped`);
    });

    // Setup health reporting
    setInterval(() => {
      if (this.scheduler && !this.isShuttingDown) {
        const health = this.scheduler.getHealthStatus();
        const metrics = this.scheduler.getMetrics();

        console.log(
          `[Standalone Scheduler] Health: ${health.healthy ? "OK" : "UNHEALTHY"}, ` +
            `Runs: ${metrics.totalRuns}, Success: ${metrics.successfulRuns}, ` +
            `Failed: ${metrics.failedRuns}, Avg Time: ${metrics.averageRunTime}ms`
        );
      }
    }, 60000); // Every minute
  }

  /**
   * Setup graceful shutdown handlers
   */
  private setupGracefulShutdown(): void {
    const gracefulShutdown = async (signal: string) => {
      if (this.isShuttingDown) return;

      console.log(
        `[Standalone Scheduler] Received ${signal}, shutting down gracefully...`
      );
      this.isShuttingDown = true;

      try {
        if (this.scheduler) {
          await this.scheduler.stop();
        }
        console.log("[Standalone Scheduler] Graceful shutdown completed");
        process.exit(0);
      } catch (error) {
        console.error("[Standalone Scheduler] Error during shutdown:", error);
        process.exit(1);
      }
    };

    process.on("SIGINT", () => gracefulShutdown("SIGINT"));
    process.on("SIGTERM", () => gracefulShutdown("SIGTERM"));

    process.on("uncaughtException", (error) => {
      console.error("[Standalone Scheduler] Uncaught exception:", error);
      gracefulShutdown("uncaughtException");
    });

    process.on("unhandledRejection", (reason, promise) => {
      console.error(
        "[Standalone Scheduler] Unhandled rejection at:",
        promise,
        "reason:",
        reason
      );
      gracefulShutdown("unhandledRejection");
    });
  }

  /**
   * Keep the process alive
   */
  private keepAlive(): void {
    // Setup periodic health checks
    setInterval(() => {
      if (!this.isShuttingDown && this.scheduler) {
        const status = this.scheduler.getStatus();
        if (status === SchedulerStatus.ERROR) {
          console.error(
            "[Standalone Scheduler] Scheduler is in ERROR state, exiting..."
          );
          process.exit(1);
        }
      }
    }, 30000); // Every 30 seconds
  }
}

/**
 * Main execution function
 */
async function main(): Promise<void> {
  const program = new Command();

  program
    .name("standalone-scheduler")
    .description("Run individual schedulers as standalone processes")
    .version("1.0.0")
    // Optional rather than required so that --list works on its own;
    // absence of a scheduler name is checked explicitly below.
    .option("-s, --scheduler <name>", "Scheduler name to run")
    .option("-l, --list", "List available schedulers")
    .parse();

  const options = program.opts();

  if (options.list) {
    console.log("Available schedulers:");
    Object.keys(AVAILABLE_SCHEDULERS).forEach((name) => {
      console.log(`  - ${name}`);
    });
    return;
  }

  if (!options.scheduler) {
    console.error(
      "Scheduler name is required. Use --list to see available schedulers."
    );
    process.exit(1);
  }

  const runner = new StandaloneSchedulerRunner(options.scheduler);
  await runner.run();
}

// Run if called directly
if (require.main === module) {
  main().catch((error) => {
    console.error("[Standalone Scheduler] Fatal error:", error);
    process.exit(1);
  });
}

export { StandaloneSchedulerRunner, AVAILABLE_SCHEDULERS };
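Besides the CLI entry point, the exported class can be embedded programmatically; a sketch:

// Sketch: run a named scheduler from another script instead of the CLI.
import { StandaloneSchedulerRunner } from "./StandaloneSchedulerRunner";

const runner = new StandaloneSchedulerRunner("csv-import");
await runner.run(); // keeps the process alive; exits on ERROR or on SIGINT/SIGTERM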
@ -70,6 +70,7 @@ export interface ChatSession {
  summary?: string | null; // Brief summary of the conversation
  messages?: Message[]; // Parsed messages from transcript
  transcriptContent?: string | null; // Full transcript content
  questions?: string[]; // Extracted questions from the conversation
}

export interface SessionQuery {
137  lib/utils/BoundedBuffer.ts  Normal file
@ -0,0 +1,137 @@
/**
 * Bounded buffer implementation to prevent memory leaks
 * Automatically manages size and provides efficient operations
 */

export interface BoundedBufferOptions {
  maxSize: number;
  cleanupThreshold?: number; // Start cleanup when buffer reaches this ratio (default 0.9)
  retentionTime?: number; // Time in milliseconds to retain items (default 1 hour)
}

export class BoundedBuffer<T extends { timestamp: Date }> {
  private buffer: T[] = [];
  private readonly maxSize: number;
  private readonly cleanupThreshold: number;
  private readonly retentionTime: number;
  private lastCleanup: Date = new Date();

  constructor(options: BoundedBufferOptions) {
    this.maxSize = options.maxSize;
    this.cleanupThreshold = options.cleanupThreshold ?? 0.9;
    this.retentionTime = options.retentionTime ?? 60 * 60 * 1000; // 1 hour default
  }

  /**
   * Add item to buffer with automatic cleanup
   */
  push(item: T): void { // eslint-disable-line no-unused-vars
    this.buffer.push(item);

    // Trigger cleanup if threshold reached
    if (this.buffer.length >= this.maxSize * this.cleanupThreshold) {
      this.cleanup();
    }
  }

  /**
   * Get all items in buffer
   */
  getAll(): readonly T[] {
    return [...this.buffer];
  }

  /**
   * Filter items by predicate
   */
  filter(predicate: (item: T) => boolean): T[] {
    return this.buffer.filter(predicate);
  }

  /**
   * Get items within time range
   */
  getWithinTime(timeRangeMs: number): T[] {
    const cutoff = new Date(Date.now() - timeRangeMs);
    return this.buffer.filter((item) => item.timestamp >= cutoff);
  }

  /**
   * Get buffer statistics
   */
  getStats(): {
    size: number;
    maxSize: number;
    utilizationRatio: number;
    oldestItem?: Date;
    newestItem?: Date;
  } {
    const size = this.buffer.length;
    const oldestItem = size > 0 ? this.buffer[0]?.timestamp : undefined;
    const newestItem = size > 0 ? this.buffer[size - 1]?.timestamp : undefined;

    return {
      size,
      maxSize: this.maxSize,
      utilizationRatio: size / this.maxSize,
      oldestItem,
      newestItem,
    };
  }

  /**
   * Force cleanup of old items
   */
  cleanup(): void {
    const cutoff = new Date(Date.now() - this.retentionTime);
    const initialSize = this.buffer.length;

    // Remove items older than retention time
    this.buffer = this.buffer.filter((item) => item.timestamp >= cutoff);

    // If still over limit, remove oldest items to maintain max size
    if (this.buffer.length > this.maxSize) {
      this.buffer = this.buffer.slice(-this.maxSize);
    }

    this.lastCleanup = new Date();

    // Log cleanup statistics (for monitoring)
    const removedItems = initialSize - this.buffer.length;
    if (removedItems > 0) {
      console.debug(
        `BoundedBuffer: Cleaned up ${removedItems} items, buffer size: ${this.buffer.length}/${this.maxSize}`
      );
    }
  }

  /**
   * Clear all items from buffer
   */
  clear(): void {
    this.buffer.length = 0;
    this.lastCleanup = new Date();
  }

  /**
   * Get current buffer size
   */
  get size(): number {
    return this.buffer.length;
  }

  /**
   * Check if buffer is approaching capacity
   */
  get isNearCapacity(): boolean {
    return this.buffer.length >= this.maxSize * this.cleanupThreshold;
  }

  /**
   * Get time since last cleanup
   */
  get timeSinceLastCleanup(): number {
    return Date.now() - this.lastCleanup.getTime();
  }
}
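A usage sketch, assuming the buffered events carry the required timestamp field:

// Sketch: keep at most 1,000 recent audit events in memory for 15 minutes.
interface AuditEvent {
  timestamp: Date;
  type: string;
}

const recentEvents = new BoundedBuffer<AuditEvent>({
  maxSize: 1000,
  retentionTime: 15 * 60 * 1000, // 15 minutes
});

recentEvents.push({ timestamp: new Date(), type: "LOGIN_FAILURE" });
const lastMinute = recentEvents.getWithinTime(60 * 1000);
console.log(recentEvents.getStats().utilizationRatio);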
@ -0,0 +1,148 @@
-- Critical Performance Indexes Migration
-- Addresses scalability bottlenecks identified in architectural analysis
-- All indexes created with CONCURRENTLY for production safety
-- Note: in PostgreSQL's CREATE INDEX syntax, INCLUDE must precede WHERE

-- =====================================================
-- 1. Security Monitoring Performance Indexes
-- =====================================================

-- Security audit log analysis with covering columns
CREATE INDEX CONCURRENTLY "SecurityAuditLog_companyId_eventType_outcome_timestamp_idx"
ON "SecurityAuditLog" ("companyId", "eventType", "outcome", "timestamp")
INCLUDE ("severity", "userId", "ipAddress", "country");

-- Geographic threat detection (partial index for efficiency)
CREATE INDEX CONCURRENTLY "SecurityAuditLog_geographic_threat_idx"
ON "SecurityAuditLog" ("ipAddress", "country", "timestamp")
INCLUDE ("eventType", "severity", "userId", "companyId")
WHERE "outcome" IN ('FAILURE', 'BLOCKED', 'SUSPICIOUS');

-- Time-based audit analysis for compliance reporting
CREATE INDEX CONCURRENTLY "SecurityAuditLog_timestamp_companyId_covering_idx"
ON "SecurityAuditLog" ("timestamp", "companyId")
INCLUDE ("eventType", "outcome", "severity", "userId");

-- =====================================================
-- 2. AI Processing Request Optimizations
-- =====================================================

-- Session-based AI processing queries with covering columns
CREATE INDEX CONCURRENTLY "AIProcessingRequest_sessionId_processingStatus_requestedAt_idx"
ON "AIProcessingRequest" ("sessionId", "processingStatus", "requestedAt");

-- Covering index for batch processing efficiency
CREATE INDEX CONCURRENTLY "AIProcessingRequest_session_companyId_processingStatus_idx"
ON "AIProcessingRequest" ("sessionId")
INCLUDE ("processingStatus", "batchId", "requestedAt");

-- Batch status monitoring and cost analysis
CREATE INDEX CONCURRENTLY "AIProcessingRequest_batchId_processingStatus_idx"
ON "AIProcessingRequest" ("batchId", "processingStatus")
INCLUDE ("requestedAt", "completedAt", "tokensUsed", "cost")
WHERE "batchId" IS NOT NULL;

-- Processing status tracking for schedulers
CREATE INDEX CONCURRENTLY "AIProcessingRequest_processingStatus_requestedAt_idx"
ON "AIProcessingRequest" ("processingStatus", "requestedAt")
INCLUDE ("sessionId", "batchId", "retryCount")
WHERE "processingStatus" IN ('PENDING', 'PROCESSING', 'RETRY_PENDING');

-- =====================================================
-- 3. Session Analytics Optimizations
-- =====================================================

-- Time-range queries with sentiment filtering for dashboards
CREATE INDEX CONCURRENTLY "Session_companyId_startTime_sentiment_covering_idx"
ON "Session" ("companyId", "startTime", "overallSentiment")
INCLUDE ("endTime", "messagesSent", "escalated", "category");

-- Performance analysis queries for monitoring
CREATE INDEX CONCURRENTLY "Session_companyId_performance_idx"
ON "Session" ("companyId", "avgResponseTime", "escalated")
INCLUDE ("startTime", "messagesSent");

-- Category and language filtering for analytics
CREATE INDEX CONCURRENTLY "Session_companyId_category_language_idx"
ON "Session" ("companyId", "category", "language")
INCLUDE ("startTime", "endTime", "overallSentiment", "messagesSent");

-- Import tracking for processing pipeline
CREATE INDEX CONCURRENTLY "Session_importId_companyId_idx"
ON "Session" ("importId", "companyId")
INCLUDE ("startTime", "category", "overallSentiment")
WHERE "importId" IS NOT NULL;

-- =====================================================
-- 4. Message Processing Optimizations
-- =====================================================

-- Message timeline with role filtering (covering index)
CREATE INDEX CONCURRENTLY "Message_sessionId_timestamp_role_covering_idx"
ON "Message" ("sessionId", "timestamp", "role")
INCLUDE ("content");

-- Message counting and analysis queries
CREATE INDEX CONCURRENTLY "Message_sessionId_role_timestamp_idx"
ON "Message" ("sessionId", "role", "timestamp");

-- =====================================================
-- 5. Processing Pipeline Status Tracking
-- =====================================================

-- Processing pipeline monitoring with error analysis
CREATE INDEX CONCURRENTLY "SessionProcessingStatus_stage_status_startedAt_idx"
ON "SessionProcessingStatus" ("stage", "status", "startedAt")
INCLUDE ("sessionId", "completedAt", "retryCount");

-- Error analysis (partial index for failed states)
CREATE INDEX CONCURRENTLY "SessionProcessingStatus_error_analysis_idx"
ON "SessionProcessingStatus" ("status", "stage")
INCLUDE ("sessionId", "errorMessage", "retryCount", "startedAt")
WHERE "status" IN ('FAILED', 'RETRY_PENDING');

-- Session-specific processing status lookup
CREATE INDEX CONCURRENTLY "SessionProcessingStatus_sessionId_stage_status_idx"
ON "SessionProcessingStatus" ("sessionId", "stage", "status")
INCLUDE ("startedAt", "completedAt", "retryCount");

-- =====================================================
-- 6. Company and User Access Optimizations
-- =====================================================

-- User lookup by email and company (authentication)
CREATE INDEX CONCURRENTLY "User_email_companyId_active_idx"
ON "User" ("email", "companyId")
INCLUDE ("role", "hashedPassword", "lastLoginAt")
WHERE "active" = true;

-- Company access validation
CREATE INDEX CONCURRENTLY "User_companyId_role_active_idx"
ON "User" ("companyId", "role", "active")
INCLUDE ("email", "lastLoginAt");

-- Platform user authentication
CREATE INDEX CONCURRENTLY "PlatformUser_email_active_idx"
ON "PlatformUser" ("email")
INCLUDE ("role", "hashedPassword", "lastLoginAt")
WHERE "active" = true;

-- =====================================================
-- 7. Session Import Processing
-- =====================================================

-- Import processing status tracking
CREATE INDEX CONCURRENTLY "SessionImport_companyId_processingStatus_createdAt_idx"
ON "SessionImport" ("companyId", "processingStatus", "createdAt")
INCLUDE ("id", "csvUrl", "processedAt");

-- Pending imports for scheduler processing
CREATE INDEX CONCURRENTLY "SessionImport_processingStatus_createdAt_idx"
ON "SessionImport" ("processingStatus", "createdAt")
INCLUDE ("companyId", "csvUrl", "retryCount")
WHERE "processingStatus" IN ('PENDING', 'PROCESSING', 'RETRY_PENDING');

-- Import completion tracking
CREATE INDEX CONCURRENTLY "SessionImport_companyId_processedAt_idx"
ON "SessionImport" ("companyId", "processedAt")
INCLUDE ("processingStatus", "totalSessions", "successfulSessions")
WHERE "processedAt" IS NOT NULL;
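For context, a sketch of the kind of Prisma query the pending-imports partial index above is meant to serve; field names follow the migration, but the exact call site is an assumption:

// Sketch: the scheduler-facing query that
// SessionImport_processingStatus_createdAt_idx targets.
const pending = await prisma.sessionImport.findMany({
  where: {
    processingStatus: { in: ["PENDING", "PROCESSING", "RETRY_PENDING"] },
  },
  orderBy: { createdAt: "asc" },
  take: 50,
  select: { id: true, companyId: true, csvUrl: true, retryCount: true },
});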
@ -61,12 +61,20 @@ export const dashboardRouter = router({
      const sessions = await ctx.prisma.session.findMany({
        where,
        include: {
          import: {
            select: {
              externalSessionId: true,
            },
          },
          messages: {
            select: {
              id: true,
              sessionId: true,
              role: true,
              content: true,
              order: true,
              timestamp: true,
              createdAt: true,
            },
            orderBy: { order: "asc" },
          },
@ -88,7 +96,28 @@ export const dashboardRouter = router({

      return {
        sessions: sessions.map((session) => ({
          ...session,
          id: session.id,
          sessionId: session.import?.externalSessionId || session.id,
          companyId: session.companyId,
          userId: session.userId,
          category: session.category,
          language: session.language,
          country: session.country,
          ipAddress: session.ipAddress,
          sentiment: session.sentiment,
          messagesSent: session.messagesSent ?? undefined,
          startTime: session.startTime,
          endTime: session.endTime,
          createdAt: session.createdAt,
          updatedAt: session.updatedAt,
          avgResponseTime: session.avgResponseTime,
          escalated: session.escalated ?? undefined,
          forwardedHr: session.forwardedHr ?? undefined,
          initialMsg: session.initialMsg ?? undefined,
          fullTranscriptUrl: session.fullTranscriptUrl ?? undefined,
          summary: session.summary ?? undefined,
          messages: session.messages,
          transcriptContent: null,
          questions: session.sessionQuestions.map((sq) => sq.question.content),
        })),
        pagination: {
@ -112,7 +141,21 @@ export const dashboardRouter = router({
          companyId: ctx.company.id,
        },
        include: {
          import: {
            select: {
              externalSessionId: true,
            },
          },
          messages: {
            select: {
              id: true,
              sessionId: true,
              role: true,
              content: true,
              order: true,
              timestamp: true,
              createdAt: true,
            },
            orderBy: { order: "asc" },
          },
          sessionQuestions: {
@ -136,7 +179,28 @@ export const dashboardRouter = router({
      }

      return {
        ...session,
        id: session.id,
        sessionId: session.import?.externalSessionId || session.id,
        companyId: session.companyId,
        userId: session.userId,
        category: session.category,
        language: session.language,
        country: session.country,
        ipAddress: session.ipAddress,
        sentiment: session.sentiment,
        messagesSent: session.messagesSent ?? undefined,
        startTime: session.startTime,
        endTime: session.endTime,
        createdAt: session.createdAt,
        updatedAt: session.updatedAt,
        avgResponseTime: session.avgResponseTime,
        escalated: session.escalated ?? undefined,
        forwardedHr: session.forwardedHr ?? undefined,
        initialMsg: session.initialMsg ?? undefined,
        fullTranscriptUrl: session.fullTranscriptUrl ?? undefined,
        summary: session.summary ?? undefined,
        messages: session.messages,
        transcriptContent: null,
        questions: session.sessionQuestions.map((sq) => sq.question.content),
      };
    }),
Block a user