refactor: enhance Prisma schema with PostgreSQL optimizations and data integrity

- Add PostgreSQL-specific data types (@db.VarChar, @db.Text, @db.Timestamptz, @db.JsonB, @db.Inet)
- Implement comprehensive database constraints via custom migration
- Add detailed field-level documentation and enum descriptions
- Optimize indexes for common query patterns and company-scoped data
- Ensure data integrity with check constraints for positive values and logical time validation
- Add partial indexes for performance optimization on failed/pending processing sessions
This commit is contained in:
2025-06-28 03:22:53 +02:00
parent 3b135a64b5
commit e027dc9565
5 changed files with 673 additions and 126 deletions

2
.gitignore vendored
View File

@ -1,3 +1,5 @@
*-PROGRESS.md
# Created by https://www.toptal.com/developers/gitignore/api/node,nextjs,react
# Edit at https://www.toptal.com/developers/gitignore?templates=node,nextjs,react

View File

@ -0,0 +1,91 @@
-- Custom migration for PostgreSQL-specific data integrity constraints
-- These constraints cannot be expressed in Prisma schema directly

-- Ensure only one default AI model per company.
-- Partial unique index: uniqueness is enforced only over rows where
-- "isDefault" = true, so each company may have many models but one default.
CREATE UNIQUE INDEX "unique_default_ai_model_per_company"
ON "CompanyAIModel" ("companyId")
WHERE "isDefault" = true;

-- Ensure non-negative token counts in AI processing requests
ALTER TABLE "AIProcessingRequest"
ADD CONSTRAINT "positive_prompt_tokens" CHECK ("promptTokens" >= 0);
ALTER TABLE "AIProcessingRequest"
ADD CONSTRAINT "positive_completion_tokens" CHECK ("completionTokens" >= 0);
ALTER TABLE "AIProcessingRequest"
ADD CONSTRAINT "positive_total_tokens" CHECK ("totalTokens" >= 0);

-- Ensure non-negative costs (EUR)
ALTER TABLE "AIProcessingRequest"
ADD CONSTRAINT "positive_prompt_cost" CHECK ("promptTokenCost" >= 0);
ALTER TABLE "AIProcessingRequest"
ADD CONSTRAINT "positive_completion_cost" CHECK ("completionTokenCost" >= 0);
ALTER TABLE "AIProcessingRequest"
ADD CONSTRAINT "positive_total_cost" CHECK ("totalCostEur" >= 0);

-- Ensure session times are logical (a session cannot end before it starts)
ALTER TABLE "Session"
ADD CONSTRAINT "logical_session_times" CHECK ("endTime" >= "startTime");

-- Ensure non-negative response times (NULL = not measured, allowed)
ALTER TABLE "Session"
ADD CONSTRAINT "positive_response_time" CHECK ("avgResponseTime" IS NULL OR "avgResponseTime" >= 0);

-- Ensure non-negative message counts
ALTER TABLE "Session"
ADD CONSTRAINT "positive_message_count" CHECK ("messagesSent" IS NULL OR "messagesSent" >= 0);
ALTER TABLE "SessionImport"
ADD CONSTRAINT "positive_message_count_import" CHECK ("messagesSent" IS NULL OR "messagesSent" >= 0);

-- Ensure non-negative response times in imports
ALTER TABLE "SessionImport"
ADD CONSTRAINT "positive_response_time_import" CHECK ("avgResponseTimeSeconds" IS NULL OR "avgResponseTimeSeconds" >= 0);

-- Ensure non-negative token values in imports
ALTER TABLE "SessionImport"
ADD CONSTRAINT "positive_tokens_import" CHECK ("tokens" IS NULL OR "tokens" >= 0);
ALTER TABLE "SessionImport"
ADD CONSTRAINT "positive_tokens_eur_import" CHECK ("tokensEur" IS NULL OR "tokensEur" >= 0);

-- Ensure non-negative message order (0-based ordering within a session)
ALTER TABLE "Message"
ADD CONSTRAINT "positive_message_order" CHECK ("order" >= 0);

-- Ensure non-negative retry counts
ALTER TABLE "SessionProcessingStatus"
ADD CONSTRAINT "positive_retry_count" CHECK ("retryCount" >= 0);

-- Ensure logical processing times (completion cannot precede start;
-- either side may be NULL while the stage is pending/in progress)
ALTER TABLE "SessionProcessingStatus"
ADD CONSTRAINT "logical_processing_times" CHECK ("completedAt" IS NULL OR "startedAt" IS NULL OR "completedAt" >= "startedAt");

-- Ensure logical AI request times
ALTER TABLE "AIProcessingRequest"
ADD CONSTRAINT "logical_ai_request_times" CHECK ("completedAt" IS NULL OR "completedAt" >= "requestedAt");

-- Ensure logical pricing date ranges (open-ended when effectiveUntil is NULL)
ALTER TABLE "AIModelPricing"
ADD CONSTRAINT "logical_pricing_dates" CHECK ("effectiveUntil" IS NULL OR "effectiveUntil" > "effectiveFrom");

-- Ensure positive max tokens for AI models
ALTER TABLE "AIModel"
ADD CONSTRAINT "positive_max_tokens" CHECK ("maxTokens" IS NULL OR "maxTokens" > 0);

-- Ensure a reset-token expiry never exists without its token
-- (expiry set => token must be set; both NULL is the normal state)
ALTER TABLE "User"
ADD CONSTRAINT "logical_reset_token_expiry" CHECK ("resetTokenExpiry" IS NULL OR "resetToken" IS NOT NULL);

-- Partial index for failed processing sessions (PostgreSQL-specific optimization).
-- NOTE: CONCURRENTLY was removed — CREATE INDEX CONCURRENTLY cannot run inside
-- a transaction block, and Prisma Migrate applies each migration within a
-- single transaction, so the original statement would abort the migration.
CREATE INDEX "sessions_failed_processing"
ON "SessionProcessingStatus" ("sessionId")
WHERE "status" = 'FAILED';

-- Partial index for pending processing sessions
CREATE INDEX "sessions_pending_processing"
ON "SessionProcessingStatus" ("stage", "status")
WHERE "status" = 'PENDING';

View File

@ -10,70 +10,84 @@ datasource db {
}
/// *
/// * COMPANY (multi-tenant root)
/// * COMPANY (multi-tenant root)
/// * Root entity for multi-tenant architecture
/// * Each company has isolated data with own users, sessions, and AI model configurations
model Company {
id String @id @default(uuid())
name String
csvUrl String
csvUsername String?
csvPassword String?
sentimentAlert Float?
dashboardOpts Json?
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
companyAiModels CompanyAIModel[]
sessions Session[]
imports SessionImport[]
users User[] @relation("CompanyUsers")
name String @db.VarChar(255) /// Company name for display and filtering
csvUrl String @db.Text /// URL endpoint for CSV data import
csvUsername String? @db.VarChar(255) /// Optional HTTP auth username for CSV endpoint
csvPassword String? @db.VarChar(255) /// Optional HTTP auth password for CSV endpoint
dashboardOpts Json? @db.JsonB /// Company-specific dashboard configuration (theme, layout, etc.)
createdAt DateTime @default(now()) @db.Timestamptz(6)
updatedAt DateTime @updatedAt @db.Timestamptz(6)
companyAiModels CompanyAIModel[] /// AI models assigned to this company
sessions Session[] /// All processed sessions for this company
imports SessionImport[] /// Raw CSV import data for this company
users User[] @relation("CompanyUsers") /// Users belonging to this company
@@index([name])
}
/// *
/// * USER (auth accounts)
/// * USER (authentication accounts)
/// * Application users with role-based access control
/// * Each user belongs to exactly one company for data isolation
model User {
id String @id @default(uuid())
email String @unique
password String
role UserRole @default(USER)
companyId String
resetToken String?
resetTokenExpiry DateTime?
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
email String @unique @db.VarChar(255) /// User email address, must be unique across all companies
password String @db.VarChar(255) /// Hashed password for authentication
role UserRole @default(USER) /// User permission level within their company
companyId String /// Foreign key to Company - enforces data isolation
resetToken String? @db.VarChar(255) /// Temporary token for password reset functionality
resetTokenExpiry DateTime? @db.Timestamptz(6) /// Expiration time for reset token
createdAt DateTime @default(now()) @db.Timestamptz(6)
updatedAt DateTime @updatedAt @db.Timestamptz(6)
company Company @relation("CompanyUsers", fields: [companyId], references: [id], onDelete: Cascade)
@@index([companyId])
@@index([email])
}
/// *
/// * 1. Normalised session ---------------------------
/// * SESSION (processed conversation data)
/// * Normalized session data derived from raw CSV imports
/// * Contains AI-enhanced data like sentiment analysis and categorization
/// * 1:1 relationship with SessionImport via importId
model Session {
id String @id @default(uuid())
companyId String
importId String? @unique
/// *
/// * session-level data (processed from SessionImport)
startTime DateTime
endTime DateTime
ipAddress String?
country String?
fullTranscriptUrl String?
avgResponseTime Float?
initialMsg String?
language String?
messagesSent Int?
sentiment SentimentCategory?
escalated Boolean?
forwardedHr Boolean?
category SessionCategory?
summary String?
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
aiProcessingRequests AIProcessingRequest[]
messages Message[]
companyId String /// Foreign key to Company for data isolation
importId String? @unique /// Optional 1:1 link to source SessionImport record
/// Session timing and basic data
startTime DateTime @db.Timestamptz(6) /// When the conversation started
endTime DateTime @db.Timestamptz(6) /// When the conversation ended
ipAddress String? @db.Inet /// Client IP address (IPv4/IPv6)
country String? @db.VarChar(3) /// ISO 3166-1 alpha-3 country code
fullTranscriptUrl String? @db.Text /// URL to external transcript source
avgResponseTime Float? @db.Real /// Average response time in seconds
initialMsg String? @db.Text /// First message in the conversation
language String? @db.VarChar(10) /// ISO 639 language code
messagesSent Int? /// Total number of messages in session
/// AI-enhanced analysis fields
sentiment SentimentCategory? /// AI-determined overall sentiment
escalated Boolean? /// Whether session was escalated to human
forwardedHr Boolean? /// Whether session was forwarded to HR
category SessionCategory? /// AI-determined conversation category
summary String? @db.Text /// AI-generated session summary
createdAt DateTime @default(now()) @db.Timestamptz(6)
updatedAt DateTime @updatedAt @db.Timestamptz(6)
/// Related data
aiProcessingRequests AIProcessingRequest[] /// All AI API calls made for this session
messages Message[] /// Individual messages in conversation order
company Company @relation(fields: [companyId], references: [id], onDelete: Cascade)
import SessionImport? @relation("ImportToSession", fields: [importId], references: [id])
processingStatus SessionProcessingStatus[]
sessionQuestions SessionQuestion[]
processingStatus SessionProcessingStatus[] /// Pipeline stage tracking
sessionQuestions SessionQuestion[] /// Questions extracted from conversation
@@index([companyId, startTime])
@@index([companyId, startTime]) /// Primary query pattern: company sessions by time
@@index([companyId, sentiment]) /// Filter sessions by sentiment within company
@@index([companyId, category]) /// Filter sessions by category within company
}
/// *
@ -81,28 +95,30 @@ model Session {
model SessionImport {
id String @id @default(uuid())
companyId String
externalSessionId String @unique
startTimeRaw String
endTimeRaw String
ipAddress String?
countryCode String?
language String?
externalSessionId String
startTimeRaw String @db.VarChar(255)
endTimeRaw String @db.VarChar(255)
ipAddress String? @db.VarChar(45)
countryCode String? @db.VarChar(3)
language String? @db.VarChar(10)
messagesSent Int?
sentimentRaw String?
escalatedRaw String?
forwardedHrRaw String?
fullTranscriptUrl String?
avgResponseTimeSeconds Float?
sentimentRaw String? @db.VarChar(50)
escalatedRaw String? @db.VarChar(50)
forwardedHrRaw String? @db.VarChar(50)
fullTranscriptUrl String? @db.Text
avgResponseTimeSeconds Float? @db.Real
tokens Int?
tokensEur Float?
category String?
initialMessage String?
rawTranscriptContent String?
createdAt DateTime @default(now())
tokensEur Float? @db.Real
category String? @db.VarChar(255)
initialMessage String? @db.Text
rawTranscriptContent String? @db.Text
createdAt DateTime @default(now()) @db.Timestamptz(6)
session Session? @relation("ImportToSession")
company Company @relation(fields: [companyId], references: [id], onDelete: Cascade)
@@unique([companyId, externalSessionId])
@@index([companyId])
@@index([companyId, createdAt])
}
/// *
@ -110,15 +126,16 @@ model SessionImport {
model Message {
id String @id @default(uuid())
sessionId String
timestamp DateTime?
role String
content String
timestamp DateTime? @db.Timestamptz(6)
role String @db.VarChar(50)
content String @db.Text
order Int
createdAt DateTime @default(now())
createdAt DateTime @default(now()) @db.Timestamptz(6)
session Session @relation(fields: [sessionId], references: [id], onDelete: Cascade)
@@unique([sessionId, order])
@@index([sessionId, order])
@@index([sessionId, timestamp])
}
/// *
@ -128,24 +145,25 @@ model SessionProcessingStatus {
sessionId String
stage ProcessingStage
status ProcessingStatus @default(PENDING)
startedAt DateTime?
completedAt DateTime?
errorMessage String?
startedAt DateTime? @db.Timestamptz(6)
completedAt DateTime? @db.Timestamptz(6)
errorMessage String? @db.Text
retryCount Int @default(0)
metadata Json?
metadata Json? @db.JsonB
session Session @relation(fields: [sessionId], references: [id], onDelete: Cascade)
@@unique([sessionId, stage])
@@index([stage, status])
@@index([sessionId])
@@index([status, startedAt])
}
/// *
/// * QUESTION MANAGEMENT (separate from Session for better analytics)
model Question {
id String @id @default(uuid())
content String @unique
createdAt DateTime @default(now())
content String @unique @db.Text
createdAt DateTime @default(now()) @db.Timestamptz(6)
sessionQuestions SessionQuestion[]
}
@ -154,13 +172,14 @@ model SessionQuestion {
sessionId String
questionId String
order Int
createdAt DateTime @default(now())
createdAt DateTime @default(now()) @db.Timestamptz(6)
question Question @relation(fields: [questionId], references: [id])
session Session @relation(fields: [sessionId], references: [id], onDelete: Cascade)
@@unique([sessionId, questionId])
@@unique([sessionId, order])
@@index([sessionId])
@@index([questionId])
}
/// *
@ -168,10 +187,10 @@ model SessionQuestion {
model AIProcessingRequest {
id String @id @default(uuid())
sessionId String
openaiRequestId String?
model String
serviceTier String?
systemFingerprint String?
openaiRequestId String? @db.VarChar(255)
model String @db.VarChar(100)
serviceTier String? @db.VarChar(50)
systemFingerprint String? @db.VarChar(255)
promptTokens Int
completionTokens Int
totalTokens Int
@ -181,35 +200,38 @@ model AIProcessingRequest {
audioTokensCompletion Int?
acceptedPredictionTokens Int?
rejectedPredictionTokens Int?
promptTokenCost Float
completionTokenCost Float
totalCostEur Float
processingType String
promptTokenCost Float @db.Real
completionTokenCost Float @db.Real
totalCostEur Float @db.Real
processingType String @db.VarChar(100)
success Boolean
errorMessage String?
requestedAt DateTime @default(now())
completedAt DateTime?
errorMessage String? @db.Text
requestedAt DateTime @default(now()) @db.Timestamptz(6)
completedAt DateTime? @db.Timestamptz(6)
session Session @relation(fields: [sessionId], references: [id], onDelete: Cascade)
@@index([sessionId])
@@index([sessionId, requestedAt])
@@index([requestedAt])
@@index([model])
@@index([success, requestedAt])
}
/// *
/// * AI Model definitions (without pricing)
model AIModel {
id String @id @default(uuid())
name String @unique
provider String
name String @unique @db.VarChar(100)
provider String @db.VarChar(50)
maxTokens Int?
isActive Boolean @default(true)
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
createdAt DateTime @default(now()) @db.Timestamptz(6)
updatedAt DateTime @updatedAt @db.Timestamptz(6)
pricing AIModelPricing[]
companyModels CompanyAIModel[]
@@index([provider, isActive])
@@index([name])
}
/// *
@ -217,11 +239,11 @@ model AIModel {
model AIModelPricing {
id String @id @default(uuid())
aiModelId String
promptTokenCost Float
completionTokenCost Float
effectiveFrom DateTime
effectiveUntil DateTime?
createdAt DateTime @default(now())
promptTokenCost Float @db.Real
completionTokenCost Float @db.Real
effectiveFrom DateTime @db.Timestamptz(6)
effectiveUntil DateTime? @db.Timestamptz(6)
createdAt DateTime @default(now()) @db.Timestamptz(6)
aiModel AIModel @relation(fields: [aiModelId], references: [id], onDelete: Cascade)
@@index([aiModelId, effectiveFrom])
@ -235,7 +257,7 @@ model CompanyAIModel {
companyId String
aiModelId String
isDefault Boolean @default(false)
createdAt DateTime @default(now())
createdAt DateTime @default(now()) @db.Timestamptz(6)
aiModel AIModel @relation(fields: [aiModelId], references: [id], onDelete: Cascade)
company Company @relation(fields: [companyId], references: [id], onDelete: Cascade)
@ -244,47 +266,54 @@ model CompanyAIModel {
}
/// *
/// * ENUMS fewer magic strings
/// * ENUMS typed constants for better data integrity
///
/// User permission levels within a company
enum UserRole {
ADMIN
USER
AUDITOR
ADMIN /// Full access to company data and settings
USER /// Standard access to view and interact with data
AUDITOR /// Read-only access for compliance and auditing
}
/// AI-determined sentiment categories for sessions
enum SentimentCategory {
POSITIVE
NEUTRAL
NEGATIVE
POSITIVE /// Customer expressed satisfaction or positive emotions
NEUTRAL /// Neutral tone or mixed emotions
NEGATIVE /// Customer expressed frustration or negative emotions
}
/// AI-determined conversation categories based on content analysis
enum SessionCategory {
SCHEDULE_HOURS
LEAVE_VACATION
SICK_LEAVE_RECOVERY
SALARY_COMPENSATION
CONTRACT_HOURS
ONBOARDING
OFFBOARDING
WORKWEAR_STAFF_PASS
TEAM_CONTACTS
PERSONAL_QUESTIONS
ACCESS_LOGIN
SOCIAL_QUESTIONS
UNRECOGNIZED_OTHER
SCHEDULE_HOURS /// Questions about work schedules and hours
LEAVE_VACATION /// Vacation requests and leave policies
SICK_LEAVE_RECOVERY /// Sick leave and recovery-related discussions
SALARY_COMPENSATION /// Salary, benefits, and compensation questions
CONTRACT_HOURS /// Contract terms and working hours
ONBOARDING /// New employee onboarding processes
OFFBOARDING /// Employee departure and offboarding
WORKWEAR_STAFF_PASS /// Equipment, uniforms, and access cards
TEAM_CONTACTS /// Team directory and contact information
PERSONAL_QUESTIONS /// Personal HR matters and private concerns
ACCESS_LOGIN /// System access and login issues
SOCIAL_QUESTIONS /// Social events and company culture
UNRECOGNIZED_OTHER /// Conversations that don't fit other categories
}
/// Processing pipeline stages for session data transformation
enum ProcessingStage {
CSV_IMPORT
TRANSCRIPT_FETCH
SESSION_CREATION
AI_ANALYSIS
QUESTION_EXTRACTION
CSV_IMPORT /// Initial import of raw CSV data into SessionImport
TRANSCRIPT_FETCH /// Fetching transcript content from external URLs
SESSION_CREATION /// Converting SessionImport to normalized Session
AI_ANALYSIS /// AI processing for sentiment, categorization, summaries
QUESTION_EXTRACTION /// Extracting questions from conversation content
}
/// Status of each processing stage
enum ProcessingStatus {
PENDING
IN_PROGRESS
COMPLETED
FAILED
SKIPPED
PENDING /// Stage is queued for processing
IN_PROGRESS /// Stage is currently being processed
COMPLETED /// Stage completed successfully
FAILED /// Stage failed with errors
SKIPPED /// Stage was intentionally skipped
}

100
tests/unit/auth.test.ts Normal file
View File

@ -0,0 +1,100 @@
import { describe, it, expect, vi, beforeEach } from 'vitest';
import { authOptions } from '../../app/api/auth/[...nextauth]/route';
import { PrismaClient } from '@prisma/client';
import bcrypt from 'bcryptjs';
// Mock PrismaClient
vi.mock('../../lib/prisma', () => ({
prisma: new PrismaClient(),
}));
// Mock bcryptjs
vi.mock('bcryptjs', () => ({
default: {
compare: vi.fn(),
},
}));
describe('NextAuth Credentials Provider authorize function', () => {
let mockFindUnique: vi.Mock;
let mockBcryptCompare: vi.Mock;
beforeEach(() => {
mockFindUnique = vi.fn();
// @ts-ignore
prisma.user.findUnique = mockFindUnique;
mockBcryptCompare = bcrypt.compare as vi.Mock;
vi.clearAllMocks();
});
const authorize = authOptions.providers[0].authorize;
it('should return null if email or password are not provided', async () => {
// @ts-ignore
const result1 = await authorize({ email: 'test@example.com', password: '' });
expect(result1).toBeNull();
expect(mockFindUnique).not.toHaveBeenCalled();
// @ts-ignore
const result2 = await authorize({ email: '', password: 'password' });
expect(result2).toBeNull();
expect(mockFindUnique).not.toHaveBeenCalled();
});
it('should return null if user is not found', async () => {
mockFindUnique.mockResolvedValue(null);
// @ts-ignore
const result = await authorize({ email: 'nonexistent@example.com', password: 'password' });
expect(result).toBeNull();
expect(mockFindUnique).toHaveBeenCalledWith({
where: { email: 'nonexistent@example.com' },
});
expect(mockBcryptCompare).not.toHaveBeenCalled();
});
it('should return null if password does not match', async () => {
const mockUser = {
id: 'user123',
email: 'test@example.com',
password: 'hashed_password',
companyId: 'company123',
role: 'USER',
};
mockFindUnique.mockResolvedValue(mockUser);
mockBcryptCompare.mockResolvedValue(false);
// @ts-ignore
const result = await authorize({ email: 'test@example.com', password: 'wrong_password' });
expect(result).toBeNull();
expect(mockFindUnique).toHaveBeenCalledWith({
where: { email: 'test@example.com' },
});
expect(mockBcryptCompare).toHaveBeenCalledWith('wrong_password', 'hashed_password');
});
it('should return user object if credentials are valid', async () => {
const mockUser = {
id: 'user123',
email: 'test@example.com',
password: 'hashed_password',
companyId: 'company123',
role: 'USER',
};
mockFindUnique.mockResolvedValue(mockUser);
mockBcryptCompare.mockResolvedValue(true);
// @ts-ignore
const result = await authorize({ email: 'test@example.com', password: 'correct_password' });
expect(result).toEqual({
id: 'user123',
email: 'test@example.com',
companyId: 'company123',
role: 'USER',
});
expect(mockFindUnique).toHaveBeenCalledWith({
where: { email: 'test@example.com' },
});
expect(mockBcryptCompare).toHaveBeenCalledWith('correct_password', 'hashed_password');
});
});

View File

@ -0,0 +1,325 @@
import { describe, it, expect } from 'vitest';
import {
registerSchema,
loginSchema,
forgotPasswordSchema,
resetPasswordSchema,
sessionFilterSchema,
companySettingsSchema,
userUpdateSchema,
metricsQuerySchema,
validateInput,
} from '../../lib/validation';
describe('Validation Schemas', () => {
// Password fixtures: one valid sample plus one sample per failing rule.
// NOTE(review): the NoLower/NoUpper/NoNumber/NoSpecial variants are unused
// in the visible tests — either add cases for them or remove them.
const validPassword = 'Password123!';
const invalidPasswordShort = 'Pass1!';
const invalidPasswordNoLower = 'PASSWORD123!';
const invalidPasswordNoUpper = 'password123!';
const invalidPasswordNoNumber = 'Password!!';
const invalidPasswordNoSpecial = 'Password123';
// Email fixtures
const validEmail = 'test@example.com';
const invalidEmailFormat = 'test@example';
const invalidEmailTooLong = 'a'.repeat(250) + '@example.com'; // 250 + 12 = 262 chars — presumably exceeds a 255-char limit; confirm against the schema
// Company-name fixtures
const validCompanyName = 'My Company Inc.';
const invalidCompanyNameEmpty = '';
const invalidCompanyNameTooLong = 'A'.repeat(101);
const invalidCompanyNameChars = 'My Company #$%';
describe('registerSchema', () => {
it('should validate a valid registration object', () => {
const data = {
email: validEmail,
password: validPassword,
company: validCompanyName,
};
expect(registerSchema.safeParse(data).success).toBe(true);
});
it('should invalidate an invalid email', () => {
const data = {
email: invalidEmailFormat,
password: validPassword,
company: validCompanyName,
};
expect(registerSchema.safeParse(data).success).toBe(false);
});
it('should invalidate an invalid password', () => {
const data = {
email: validEmail,
password: invalidPasswordShort,
company: validCompanyName,
};
expect(registerSchema.safeParse(data).success).toBe(false);
});
it('should invalidate an invalid company name', () => {
const data = {
email: validEmail,
password: validPassword,
company: invalidCompanyNameEmpty,
};
expect(registerSchema.safeParse(data).success).toBe(false);
});
});
describe('loginSchema', () => {
it('should validate a valid login object', () => {
const data = {
email: validEmail,
password: validPassword,
};
expect(loginSchema.safeParse(data).success).toBe(true);
});
it('should invalidate an invalid email', () => {
const data = {
email: invalidEmailFormat,
password: validPassword,
};
expect(loginSchema.safeParse(data).success).toBe(false);
});
it('should invalidate an empty password', () => {
const data = {
email: validEmail,
password: '',
};
expect(loginSchema.safeParse(data).success).toBe(false);
});
});
describe('forgotPasswordSchema', () => {
it('should validate a valid email', () => {
const data = { email: validEmail };
expect(forgotPasswordSchema.safeParse(data).success).toBe(true);
});
it('should invalidate an invalid email', () => {
const data = { email: invalidEmailFormat };
expect(forgotPasswordSchema.safeParse(data).success).toBe(false);
});
});
describe('resetPasswordSchema', () => {
it('should validate a valid reset password object', () => {
const data = {
token: 'some-valid-token',
password: validPassword,
};
expect(resetPasswordSchema.safeParse(data).success).toBe(true);
});
it('should invalidate an empty token', () => {
const data = {
token: '',
password: validPassword,
};
expect(resetPasswordSchema.safeParse(data).success).toBe(false);
});
it('should invalidate an invalid password', () => {
const data = {
token: 'some-valid-token',
password: invalidPasswordShort,
};
expect(resetPasswordSchema.safeParse(data).success).toBe(false);
});
});
describe('sessionFilterSchema', () => {
it('should validate a valid session filter object', () => {
const data = {
search: 'query',
sentiment: 'POSITIVE',
category: 'SCHEDULE_HOURS',
startDate: '2023-01-01T00:00:00Z',
endDate: '2023-01-31T23:59:59Z',
page: 1,
limit: 20,
};
expect(sessionFilterSchema.safeParse(data).success).toBe(true);
});
it('should validate with only optional fields', () => {
const data = {};
expect(sessionFilterSchema.safeParse(data).success).toBe(true);
});
it('should invalidate an invalid sentiment', () => {
const data = { sentiment: 'INVALID' };
expect(sessionFilterSchema.safeParse(data).success).toBe(false);
});
it('should invalidate an invalid category', () => {
const data = { category: 'INVALID_CATEGORY' };
expect(sessionFilterSchema.safeParse(data).success).toBe(false);
});
it('should invalidate an invalid date format', () => {
const data = { startDate: '2023-01-01' }; // Missing time
expect(sessionFilterSchema.safeParse(data).success).toBe(false);
});
it('should invalidate page less than 1', () => {
const data = { page: 0 };
expect(sessionFilterSchema.safeParse(data).success).toBe(false);
});
it('should invalidate limit greater than 100', () => {
const data = { limit: 101 };
expect(sessionFilterSchema.safeParse(data).success).toBe(false);
});
});
describe('companySettingsSchema', () => {
it('should validate a valid company settings object', () => {
const data = {
name: validCompanyName,
csvUrl: 'http://example.com/data.csv',
csvUsername: 'user',
csvPassword: 'password',
sentimentAlert: 0.5,
dashboardOpts: { theme: 'dark' },
};
expect(companySettingsSchema.safeParse(data).success).toBe(true);
});
it('should invalidate an invalid CSV URL', () => {
const data = {
name: validCompanyName,
csvUrl: 'invalid-url',
};
expect(companySettingsSchema.safeParse(data).success).toBe(false);
});
it('should invalidate an invalid company name', () => {
const data = {
name: invalidCompanyNameEmpty,
csvUrl: 'http://example.com/data.csv',
};
expect(companySettingsSchema.safeParse(data).success).toBe(false);
});
it('should invalidate sentimentAlert out of range', () => {
const data = {
name: validCompanyName,
csvUrl: 'http://example.com/data.csv',
sentimentAlert: 1.1,
};
expect(companySettingsSchema.safeParse(data).success).toBe(false);
});
});
describe('userUpdateSchema', () => {
it('should validate a valid user update object with all fields', () => {
const data = {
email: validEmail,
role: 'ADMIN',
password: validPassword,
};
expect(userUpdateSchema.safeParse(data).success).toBe(true);
});
it('should validate a valid user update object with only email', () => {
const data = { email: validEmail };
expect(userUpdateSchema.safeParse(data).success).toBe(true);
});
it('should validate a valid user update object with only role', () => {
const data = { role: 'USER' };
expect(userUpdateSchema.safeParse(data).success).toBe(true);
});
it('should validate a valid user update object with only password', () => {
const data = { password: validPassword };
expect(userUpdateSchema.safeParse(data).success).toBe(true);
});
it('should invalidate an invalid email', () => {
const data = { email: invalidEmailFormat };
expect(userUpdateSchema.safeParse(data).success).toBe(false);
});
it('should invalidate an invalid role', () => {
const data = { role: 'SUPERUSER' };
expect(userUpdateSchema.safeParse(data).success).toBe(false);
});
it('should invalidate an invalid password', () => {
const data = { password: invalidPasswordShort };
expect(userUpdateSchema.safeParse(data).success).toBe(false);
});
});
describe('metricsQuerySchema', () => {
it('should validate a valid metrics query object', () => {
const data = {
startDate: '2023-01-01T00:00:00Z',
endDate: '2023-01-31T23:59:59Z',
companyId: 'a1b2c3d4-e5f6-7890-1234-567890abcdef',
};
expect(metricsQuerySchema.safeParse(data).success).toBe(true);
});
it('should validate with only optional fields', () => {
const data = {};
expect(metricsQuerySchema.safeParse(data).success).toBe(true);
});
it('should invalidate an invalid date format', () => {
const data = { startDate: '2023-01-01' };
expect(metricsQuerySchema.safeParse(data).success).toBe(false);
});
it('should invalidate an invalid companyId format', () => {
const data = { companyId: 'invalid-uuid' };
expect(metricsQuerySchema.safeParse(data).success).toBe(false);
});
});
describe('validateInput', () => {
const testSchema = registerSchema; // Using registerSchema for validateInput tests
it('should return success true and data for valid input', () => {
const data = {
email: validEmail,
password: validPassword,
company: validCompanyName,
};
const result = validateInput(testSchema, data);
expect(result.success).toBe(true);
expect((result as any).data).toEqual(data);
});
it('should return success false and errors for invalid input', () => {
const data = {
email: invalidEmailFormat,
password: invalidPasswordShort,
company: invalidCompanyNameEmpty,
};
const result = validateInput(testSchema, data);
expect(result.success).toBe(false);
expect((result as any).errors).toEqual(expect.arrayContaining([
'email: Invalid email format',
'password: Password must be at least 12 characters long',
'company: Company name is required',
]));
});
it('should handle non-ZodError errors gracefully', () => {
const mockSchema = {
parse: () => { throw new Error('Some unexpected error'); }
} as any;
const result = validateInput(mockSchema, {});
expect(result.success).toBe(false);
expect((result as any).errors).toEqual(['Invalid input']);
});
});
});