refactor: enhance Prisma schema with PostgreSQL optimizations and data integrity

- Add PostgreSQL-specific data types (@db.VarChar, @db.Text, @db.Timestamptz, @db.JsonB, @db.Inet)
- Implement comprehensive database constraints via custom migration
- Add detailed field-level documentation and enum descriptions
- Optimize indexes for common query patterns and company-scoped data
- Ensure data integrity with check constraints for positive values and logical time validation
- Add partial indexes for performance optimization on failed/pending processing sessions
This commit is contained in:
2025-06-28 03:22:53 +02:00
parent 3b135a64b5
commit e027dc9565
5 changed files with 673 additions and 126 deletions

2
.gitignore vendored
View File

@ -1,3 +1,5 @@
*-PROGRESS.md
# Created by https://www.toptal.com/developers/gitignore/api/node,nextjs,react # Created by https://www.toptal.com/developers/gitignore/api/node,nextjs,react
# Edit at https://www.toptal.com/developers/gitignore?templates=node,nextjs,react # Edit at https://www.toptal.com/developers/gitignore?templates=node,nextjs,react

View File

@ -0,0 +1,91 @@
-- Custom migration for PostgreSQL-specific data integrity constraints
-- These constraints cannot be expressed in Prisma schema directly
-- Ensure only one default AI model per company
CREATE UNIQUE INDEX "unique_default_ai_model_per_company"
ON "CompanyAIModel" ("companyId")
WHERE "isDefault" = true;
-- Ensure positive token counts in AI processing requests
ALTER TABLE "AIProcessingRequest"
ADD CONSTRAINT "positive_prompt_tokens" CHECK ("promptTokens" >= 0);
ALTER TABLE "AIProcessingRequest"
ADD CONSTRAINT "positive_completion_tokens" CHECK ("completionTokens" >= 0);
ALTER TABLE "AIProcessingRequest"
ADD CONSTRAINT "positive_total_tokens" CHECK ("totalTokens" >= 0);
-- Ensure positive costs
ALTER TABLE "AIProcessingRequest"
ADD CONSTRAINT "positive_prompt_cost" CHECK ("promptTokenCost" >= 0);
ALTER TABLE "AIProcessingRequest"
ADD CONSTRAINT "positive_completion_cost" CHECK ("completionTokenCost" >= 0);
ALTER TABLE "AIProcessingRequest"
ADD CONSTRAINT "positive_total_cost" CHECK ("totalCostEur" >= 0);
-- Ensure session times are logical
ALTER TABLE "Session"
ADD CONSTRAINT "logical_session_times" CHECK ("endTime" >= "startTime");
-- Ensure positive response times
ALTER TABLE "Session"
ADD CONSTRAINT "positive_response_time" CHECK ("avgResponseTime" IS NULL OR "avgResponseTime" >= 0);
-- Ensure positive message counts
ALTER TABLE "Session"
ADD CONSTRAINT "positive_message_count" CHECK ("messagesSent" IS NULL OR "messagesSent" >= 0);
ALTER TABLE "SessionImport"
ADD CONSTRAINT "positive_message_count_import" CHECK ("messagesSent" IS NULL OR "messagesSent" >= 0);
-- Ensure positive response times in imports
ALTER TABLE "SessionImport"
ADD CONSTRAINT "positive_response_time_import" CHECK ("avgResponseTimeSeconds" IS NULL OR "avgResponseTimeSeconds" >= 0);
-- Ensure positive token values in imports
ALTER TABLE "SessionImport"
ADD CONSTRAINT "positive_tokens_import" CHECK ("tokens" IS NULL OR "tokens" >= 0);
ALTER TABLE "SessionImport"
ADD CONSTRAINT "positive_tokens_eur_import" CHECK ("tokensEur" IS NULL OR "tokensEur" >= 0);
-- Ensure positive message order
ALTER TABLE "Message"
ADD CONSTRAINT "positive_message_order" CHECK ("order" >= 0);
-- Ensure positive retry counts
ALTER TABLE "SessionProcessingStatus"
ADD CONSTRAINT "positive_retry_count" CHECK ("retryCount" >= 0);
-- Ensure logical processing times
ALTER TABLE "SessionProcessingStatus"
ADD CONSTRAINT "logical_processing_times" CHECK ("completedAt" IS NULL OR "startedAt" IS NULL OR "completedAt" >= "startedAt");
-- Ensure logical AI request times
ALTER TABLE "AIProcessingRequest"
ADD CONSTRAINT "logical_ai_request_times" CHECK ("completedAt" IS NULL OR "completedAt" >= "requestedAt");
-- Ensure logical pricing date ranges
ALTER TABLE "AIModelPricing"
ADD CONSTRAINT "logical_pricing_dates" CHECK ("effectiveUntil" IS NULL OR "effectiveUntil" > "effectiveFrom");
-- Ensure positive max tokens for AI models
ALTER TABLE "AIModel"
ADD CONSTRAINT "positive_max_tokens" CHECK ("maxTokens" IS NULL OR "maxTokens" > 0);
-- Ensure logical user reset token expiry
ALTER TABLE "User"
ADD CONSTRAINT "logical_reset_token_expiry" CHECK ("resetTokenExpiry" IS NULL OR "resetToken" IS NOT NULL);
-- Add partial index for failed processing sessions (PostgreSQL-specific optimization)
CREATE INDEX CONCURRENTLY "sessions_failed_processing"
ON "SessionProcessingStatus" ("sessionId")
WHERE "status" = 'FAILED';
-- Add partial index for pending processing sessions
CREATE INDEX CONCURRENTLY "sessions_pending_processing"
ON "SessionProcessingStatus" ("stage", "status")
WHERE "status" = 'PENDING';

View File

@ -11,69 +11,83 @@ datasource db {
/// * /// *
/// * COMPANY (multi-tenant root) /// * COMPANY (multi-tenant root)
/// * Root entity for multi-tenant architecture
/// * Each company has isolated data with own users, sessions, and AI model configurations
model Company { model Company {
id String @id @default(uuid()) id String @id @default(uuid())
name String name String @db.VarChar(255) /// Company name for display and filtering
csvUrl String csvUrl String @db.Text /// URL endpoint for CSV data import
csvUsername String? csvUsername String? @db.VarChar(255) /// Optional HTTP auth username for CSV endpoint
csvPassword String? csvPassword String? @db.VarChar(255) /// Optional HTTP auth password for CSV endpoint
sentimentAlert Float? dashboardOpts Json? @db.JsonB /// Company-specific dashboard configuration (theme, layout, etc.)
dashboardOpts Json? createdAt DateTime @default(now()) @db.Timestamptz(6)
createdAt DateTime @default(now()) updatedAt DateTime @updatedAt @db.Timestamptz(6)
updatedAt DateTime @updatedAt companyAiModels CompanyAIModel[] /// AI models assigned to this company
companyAiModels CompanyAIModel[] sessions Session[] /// All processed sessions for this company
sessions Session[] imports SessionImport[] /// Raw CSV import data for this company
imports SessionImport[] users User[] @relation("CompanyUsers") /// Users belonging to this company
users User[] @relation("CompanyUsers")
@@index([name])
} }
/// * /// *
/// * USER (auth accounts) /// * USER (authentication accounts)
/// * Application users with role-based access control
/// * Each user belongs to exactly one company for data isolation
model User { model User {
id String @id @default(uuid()) id String @id @default(uuid())
email String @unique email String @unique @db.VarChar(255) /// User email address, must be unique across all companies
password String password String @db.VarChar(255) /// Hashed password for authentication
role UserRole @default(USER) role UserRole @default(USER) /// User permission level within their company
companyId String companyId String /// Foreign key to Company - enforces data isolation
resetToken String? resetToken String? @db.VarChar(255) /// Temporary token for password reset functionality
resetTokenExpiry DateTime? resetTokenExpiry DateTime? @db.Timestamptz(6) /// Expiration time for reset token
createdAt DateTime @default(now()) createdAt DateTime @default(now()) @db.Timestamptz(6)
updatedAt DateTime @updatedAt updatedAt DateTime @updatedAt @db.Timestamptz(6)
company Company @relation("CompanyUsers", fields: [companyId], references: [id], onDelete: Cascade) company Company @relation("CompanyUsers", fields: [companyId], references: [id], onDelete: Cascade)
@@index([companyId])
@@index([email])
} }
/// * /// *
/// * 1. Normalised session --------------------------- /// * SESSION (processed conversation data)
/// * Normalized session data derived from raw CSV imports
/// * Contains AI-enhanced data like sentiment analysis and categorization
/// * 1:1 relationship with SessionImport via importId
model Session { model Session {
id String @id @default(uuid()) id String @id @default(uuid())
companyId String companyId String /// Foreign key to Company for data isolation
importId String? @unique importId String? @unique /// Optional 1:1 link to source SessionImport record
/// * /// Session timing and basic data
/// * session-level data (processed from SessionImport) startTime DateTime @db.Timestamptz(6) /// When the conversation started
startTime DateTime endTime DateTime @db.Timestamptz(6) /// When the conversation ended
endTime DateTime ipAddress String? @db.Inet /// Client IP address (IPv4/IPv6)
ipAddress String? country String? @db.VarChar(3) /// ISO 3166-1 alpha-3 country code
country String? fullTranscriptUrl String? @db.Text /// URL to external transcript source
fullTranscriptUrl String? avgResponseTime Float? @db.Real /// Average response time in seconds
avgResponseTime Float? initialMsg String? @db.Text /// First message in the conversation
initialMsg String? language String? @db.VarChar(10) /// ISO 639 language code
language String? messagesSent Int? /// Total number of messages in session
messagesSent Int? /// AI-enhanced analysis fields
sentiment SentimentCategory? sentiment SentimentCategory? /// AI-determined overall sentiment
escalated Boolean? escalated Boolean? /// Whether session was escalated to human
forwardedHr Boolean? forwardedHr Boolean? /// Whether session was forwarded to HR
category SessionCategory? category SessionCategory? /// AI-determined conversation category
summary String? summary String? @db.Text /// AI-generated session summary
createdAt DateTime @default(now()) createdAt DateTime @default(now()) @db.Timestamptz(6)
updatedAt DateTime @updatedAt updatedAt DateTime @updatedAt @db.Timestamptz(6)
aiProcessingRequests AIProcessingRequest[] /// Related data
messages Message[] aiProcessingRequests AIProcessingRequest[] /// All AI API calls made for this session
messages Message[] /// Individual messages in conversation order
company Company @relation(fields: [companyId], references: [id], onDelete: Cascade) company Company @relation(fields: [companyId], references: [id], onDelete: Cascade)
import SessionImport? @relation("ImportToSession", fields: [importId], references: [id]) import SessionImport? @relation("ImportToSession", fields: [importId], references: [id])
processingStatus SessionProcessingStatus[] processingStatus SessionProcessingStatus[] /// Pipeline stage tracking
sessionQuestions SessionQuestion[] sessionQuestions SessionQuestion[] /// Questions extracted from conversation
@@index([companyId, startTime]) @@index([companyId, startTime]) /// Primary query pattern: company sessions by time
@@index([companyId, sentiment]) /// Filter sessions by sentiment within company
@@index([companyId, category]) /// Filter sessions by category within company
} }
/// * /// *
@ -81,28 +95,30 @@ model Session {
model SessionImport { model SessionImport {
id String @id @default(uuid()) id String @id @default(uuid())
companyId String companyId String
externalSessionId String @unique externalSessionId String
startTimeRaw String startTimeRaw String @db.VarChar(255)
endTimeRaw String endTimeRaw String @db.VarChar(255)
ipAddress String? ipAddress String? @db.VarChar(45)
countryCode String? countryCode String? @db.VarChar(3)
language String? language String? @db.VarChar(10)
messagesSent Int? messagesSent Int?
sentimentRaw String? sentimentRaw String? @db.VarChar(50)
escalatedRaw String? escalatedRaw String? @db.VarChar(50)
forwardedHrRaw String? forwardedHrRaw String? @db.VarChar(50)
fullTranscriptUrl String? fullTranscriptUrl String? @db.Text
avgResponseTimeSeconds Float? avgResponseTimeSeconds Float? @db.Real
tokens Int? tokens Int?
tokensEur Float? tokensEur Float? @db.Real
category String? category String? @db.VarChar(255)
initialMessage String? initialMessage String? @db.Text
rawTranscriptContent String? rawTranscriptContent String? @db.Text
createdAt DateTime @default(now()) createdAt DateTime @default(now()) @db.Timestamptz(6)
session Session? @relation("ImportToSession") session Session? @relation("ImportToSession")
company Company @relation(fields: [companyId], references: [id], onDelete: Cascade) company Company @relation(fields: [companyId], references: [id], onDelete: Cascade)
@@unique([companyId, externalSessionId]) @@unique([companyId, externalSessionId])
@@index([companyId])
@@index([companyId, createdAt])
} }
/// * /// *
@ -110,15 +126,16 @@ model SessionImport {
model Message { model Message {
id String @id @default(uuid()) id String @id @default(uuid())
sessionId String sessionId String
timestamp DateTime? timestamp DateTime? @db.Timestamptz(6)
role String role String @db.VarChar(50)
content String content String @db.Text
order Int order Int
createdAt DateTime @default(now()) createdAt DateTime @default(now()) @db.Timestamptz(6)
session Session @relation(fields: [sessionId], references: [id], onDelete: Cascade) session Session @relation(fields: [sessionId], references: [id], onDelete: Cascade)
@@unique([sessionId, order]) @@unique([sessionId, order])
@@index([sessionId, order]) @@index([sessionId, order])
@@index([sessionId, timestamp])
} }
/// * /// *
@ -128,24 +145,25 @@ model SessionProcessingStatus {
sessionId String sessionId String
stage ProcessingStage stage ProcessingStage
status ProcessingStatus @default(PENDING) status ProcessingStatus @default(PENDING)
startedAt DateTime? startedAt DateTime? @db.Timestamptz(6)
completedAt DateTime? completedAt DateTime? @db.Timestamptz(6)
errorMessage String? errorMessage String? @db.Text
retryCount Int @default(0) retryCount Int @default(0)
metadata Json? metadata Json? @db.JsonB
session Session @relation(fields: [sessionId], references: [id], onDelete: Cascade) session Session @relation(fields: [sessionId], references: [id], onDelete: Cascade)
@@unique([sessionId, stage]) @@unique([sessionId, stage])
@@index([stage, status]) @@index([stage, status])
@@index([sessionId]) @@index([sessionId])
@@index([status, startedAt])
} }
/// * /// *
/// * QUESTION MANAGEMENT (separate from Session for better analytics) /// * QUESTION MANAGEMENT (separate from Session for better analytics)
model Question { model Question {
id String @id @default(uuid()) id String @id @default(uuid())
content String @unique content String @unique @db.Text
createdAt DateTime @default(now()) createdAt DateTime @default(now()) @db.Timestamptz(6)
sessionQuestions SessionQuestion[] sessionQuestions SessionQuestion[]
} }
@ -154,13 +172,14 @@ model SessionQuestion {
sessionId String sessionId String
questionId String questionId String
order Int order Int
createdAt DateTime @default(now()) createdAt DateTime @default(now()) @db.Timestamptz(6)
question Question @relation(fields: [questionId], references: [id]) question Question @relation(fields: [questionId], references: [id])
session Session @relation(fields: [sessionId], references: [id], onDelete: Cascade) session Session @relation(fields: [sessionId], references: [id], onDelete: Cascade)
@@unique([sessionId, questionId]) @@unique([sessionId, questionId])
@@unique([sessionId, order]) @@unique([sessionId, order])
@@index([sessionId]) @@index([sessionId])
@@index([questionId])
} }
/// * /// *
@ -168,10 +187,10 @@ model SessionQuestion {
model AIProcessingRequest { model AIProcessingRequest {
id String @id @default(uuid()) id String @id @default(uuid())
sessionId String sessionId String
openaiRequestId String? openaiRequestId String? @db.VarChar(255)
model String model String @db.VarChar(100)
serviceTier String? serviceTier String? @db.VarChar(50)
systemFingerprint String? systemFingerprint String? @db.VarChar(255)
promptTokens Int promptTokens Int
completionTokens Int completionTokens Int
totalTokens Int totalTokens Int
@ -181,35 +200,38 @@ model AIProcessingRequest {
audioTokensCompletion Int? audioTokensCompletion Int?
acceptedPredictionTokens Int? acceptedPredictionTokens Int?
rejectedPredictionTokens Int? rejectedPredictionTokens Int?
promptTokenCost Float promptTokenCost Float @db.Real
completionTokenCost Float completionTokenCost Float @db.Real
totalCostEur Float totalCostEur Float @db.Real
processingType String processingType String @db.VarChar(100)
success Boolean success Boolean
errorMessage String? errorMessage String? @db.Text
requestedAt DateTime @default(now()) requestedAt DateTime @default(now()) @db.Timestamptz(6)
completedAt DateTime? completedAt DateTime? @db.Timestamptz(6)
session Session @relation(fields: [sessionId], references: [id], onDelete: Cascade) session Session @relation(fields: [sessionId], references: [id], onDelete: Cascade)
@@index([sessionId]) @@index([sessionId])
@@index([sessionId, requestedAt])
@@index([requestedAt]) @@index([requestedAt])
@@index([model]) @@index([model])
@@index([success, requestedAt])
} }
/// * /// *
/// * AI Model definitions (without pricing) /// * AI Model definitions (without pricing)
model AIModel { model AIModel {
id String @id @default(uuid()) id String @id @default(uuid())
name String @unique name String @unique @db.VarChar(100)
provider String provider String @db.VarChar(50)
maxTokens Int? maxTokens Int?
isActive Boolean @default(true) isActive Boolean @default(true)
createdAt DateTime @default(now()) createdAt DateTime @default(now()) @db.Timestamptz(6)
updatedAt DateTime @updatedAt updatedAt DateTime @updatedAt @db.Timestamptz(6)
pricing AIModelPricing[] pricing AIModelPricing[]
companyModels CompanyAIModel[] companyModels CompanyAIModel[]
@@index([provider, isActive]) @@index([provider, isActive])
@@index([name])
} }
/// * /// *
@ -217,11 +239,11 @@ model AIModel {
model AIModelPricing { model AIModelPricing {
id String @id @default(uuid()) id String @id @default(uuid())
aiModelId String aiModelId String
promptTokenCost Float promptTokenCost Float @db.Real
completionTokenCost Float completionTokenCost Float @db.Real
effectiveFrom DateTime effectiveFrom DateTime @db.Timestamptz(6)
effectiveUntil DateTime? effectiveUntil DateTime? @db.Timestamptz(6)
createdAt DateTime @default(now()) createdAt DateTime @default(now()) @db.Timestamptz(6)
aiModel AIModel @relation(fields: [aiModelId], references: [id], onDelete: Cascade) aiModel AIModel @relation(fields: [aiModelId], references: [id], onDelete: Cascade)
@@index([aiModelId, effectiveFrom]) @@index([aiModelId, effectiveFrom])
@ -235,7 +257,7 @@ model CompanyAIModel {
companyId String companyId String
aiModelId String aiModelId String
isDefault Boolean @default(false) isDefault Boolean @default(false)
createdAt DateTime @default(now()) createdAt DateTime @default(now()) @db.Timestamptz(6)
aiModel AIModel @relation(fields: [aiModelId], references: [id], onDelete: Cascade) aiModel AIModel @relation(fields: [aiModelId], references: [id], onDelete: Cascade)
company Company @relation(fields: [companyId], references: [id], onDelete: Cascade) company Company @relation(fields: [companyId], references: [id], onDelete: Cascade)
@ -244,47 +266,54 @@ model CompanyAIModel {
} }
/// * /// *
/// * ENUMS fewer magic strings /// * ENUMS typed constants for better data integrity
///
/// User permission levels within a company
enum UserRole { enum UserRole {
ADMIN ADMIN /// Full access to company data and settings
USER USER /// Standard access to view and interact with data
AUDITOR AUDITOR /// Read-only access for compliance and auditing
} }
/// AI-determined sentiment categories for sessions
enum SentimentCategory { enum SentimentCategory {
POSITIVE POSITIVE /// Customer expressed satisfaction or positive emotions
NEUTRAL NEUTRAL /// Neutral tone or mixed emotions
NEGATIVE NEGATIVE /// Customer expressed frustration or negative emotions
} }
/// AI-determined conversation categories based on content analysis
enum SessionCategory { enum SessionCategory {
SCHEDULE_HOURS SCHEDULE_HOURS /// Questions about work schedules and hours
LEAVE_VACATION LEAVE_VACATION /// Vacation requests and leave policies
SICK_LEAVE_RECOVERY SICK_LEAVE_RECOVERY /// Sick leave and recovery-related discussions
SALARY_COMPENSATION SALARY_COMPENSATION /// Salary, benefits, and compensation questions
CONTRACT_HOURS CONTRACT_HOURS /// Contract terms and working hours
ONBOARDING ONBOARDING /// New employee onboarding processes
OFFBOARDING OFFBOARDING /// Employee departure and offboarding
WORKWEAR_STAFF_PASS WORKWEAR_STAFF_PASS /// Equipment, uniforms, and access cards
TEAM_CONTACTS TEAM_CONTACTS /// Team directory and contact information
PERSONAL_QUESTIONS PERSONAL_QUESTIONS /// Personal HR matters and private concerns
ACCESS_LOGIN ACCESS_LOGIN /// System access and login issues
SOCIAL_QUESTIONS SOCIAL_QUESTIONS /// Social events and company culture
UNRECOGNIZED_OTHER UNRECOGNIZED_OTHER /// Conversations that don't fit other categories
} }
/// Processing pipeline stages for session data transformation
enum ProcessingStage { enum ProcessingStage {
CSV_IMPORT CSV_IMPORT /// Initial import of raw CSV data into SessionImport
TRANSCRIPT_FETCH TRANSCRIPT_FETCH /// Fetching transcript content from external URLs
SESSION_CREATION SESSION_CREATION /// Converting SessionImport to normalized Session
AI_ANALYSIS AI_ANALYSIS /// AI processing for sentiment, categorization, summaries
QUESTION_EXTRACTION QUESTION_EXTRACTION /// Extracting questions from conversation content
} }
/// Status of each processing stage
enum ProcessingStatus { enum ProcessingStatus {
PENDING PENDING /// Stage is queued for processing
IN_PROGRESS IN_PROGRESS /// Stage is currently being processed
COMPLETED COMPLETED /// Stage completed successfully
FAILED FAILED /// Stage failed with errors
SKIPPED SKIPPED /// Stage was intentionally skipped
} }

100
tests/unit/auth.test.ts Normal file
View File

@ -0,0 +1,100 @@
import { describe, it, expect, vi, beforeEach } from 'vitest';
import { authOptions } from '../../app/api/auth/[...nextauth]/route';
import { PrismaClient } from '@prisma/client';
import bcrypt from 'bcryptjs';
// Mock PrismaClient
vi.mock('../../lib/prisma', () => ({
prisma: new PrismaClient(),
}));
// Mock bcryptjs
vi.mock('bcryptjs', () => ({
default: {
compare: vi.fn(),
},
}));
describe('NextAuth Credentials Provider authorize function', () => {
let mockFindUnique: vi.Mock;
let mockBcryptCompare: vi.Mock;
beforeEach(() => {
mockFindUnique = vi.fn();
// @ts-ignore
prisma.user.findUnique = mockFindUnique;
mockBcryptCompare = bcrypt.compare as vi.Mock;
vi.clearAllMocks();
});
const authorize = authOptions.providers[0].authorize;
it('should return null if email or password are not provided', async () => {
// @ts-ignore
const result1 = await authorize({ email: 'test@example.com', password: '' });
expect(result1).toBeNull();
expect(mockFindUnique).not.toHaveBeenCalled();
// @ts-ignore
const result2 = await authorize({ email: '', password: 'password' });
expect(result2).toBeNull();
expect(mockFindUnique).not.toHaveBeenCalled();
});
it('should return null if user is not found', async () => {
mockFindUnique.mockResolvedValue(null);
// @ts-ignore
const result = await authorize({ email: 'nonexistent@example.com', password: 'password' });
expect(result).toBeNull();
expect(mockFindUnique).toHaveBeenCalledWith({
where: { email: 'nonexistent@example.com' },
});
expect(mockBcryptCompare).not.toHaveBeenCalled();
});
it('should return null if password does not match', async () => {
const mockUser = {
id: 'user123',
email: 'test@example.com',
password: 'hashed_password',
companyId: 'company123',
role: 'USER',
};
mockFindUnique.mockResolvedValue(mockUser);
mockBcryptCompare.mockResolvedValue(false);
// @ts-ignore
const result = await authorize({ email: 'test@example.com', password: 'wrong_password' });
expect(result).toBeNull();
expect(mockFindUnique).toHaveBeenCalledWith({
where: { email: 'test@example.com' },
});
expect(mockBcryptCompare).toHaveBeenCalledWith('wrong_password', 'hashed_password');
});
it('should return user object if credentials are valid', async () => {
const mockUser = {
id: 'user123',
email: 'test@example.com',
password: 'hashed_password',
companyId: 'company123',
role: 'USER',
};
mockFindUnique.mockResolvedValue(mockUser);
mockBcryptCompare.mockResolvedValue(true);
// @ts-ignore
const result = await authorize({ email: 'test@example.com', password: 'correct_password' });
expect(result).toEqual({
id: 'user123',
email: 'test@example.com',
companyId: 'company123',
role: 'USER',
});
expect(mockFindUnique).toHaveBeenCalledWith({
where: { email: 'test@example.com' },
});
expect(mockBcryptCompare).toHaveBeenCalledWith('correct_password', 'hashed_password');
});
});

View File

@ -0,0 +1,325 @@
import { describe, it, expect } from 'vitest';
import {
registerSchema,
loginSchema,
forgotPasswordSchema,
resetPasswordSchema,
sessionFilterSchema,
companySettingsSchema,
userUpdateSchema,
metricsQuerySchema,
validateInput,
} from '../../lib/validation';
describe('Validation Schemas', () => {
// Helper for password validation
const validPassword = 'Password123!';
const invalidPasswordShort = 'Pass1!';
const invalidPasswordNoLower = 'PASSWORD123!';
const invalidPasswordNoUpper = 'password123!';
const invalidPasswordNoNumber = 'Password!!';
const invalidPasswordNoSpecial = 'Password123';
// Helper for email validation
const validEmail = 'test@example.com';
const invalidEmailFormat = 'test@example';
const invalidEmailTooLong = 'a'.repeat(250) + '@example.com'; // 250 + 11 = 261 chars
// Helper for company name validation
const validCompanyName = 'My Company Inc.';
const invalidCompanyNameEmpty = '';
const invalidCompanyNameTooLong = 'A'.repeat(101);
const invalidCompanyNameChars = 'My Company #$%';
describe('registerSchema', () => {
it('should validate a valid registration object', () => {
const data = {
email: validEmail,
password: validPassword,
company: validCompanyName,
};
expect(registerSchema.safeParse(data).success).toBe(true);
});
it('should invalidate an invalid email', () => {
const data = {
email: invalidEmailFormat,
password: validPassword,
company: validCompanyName,
};
expect(registerSchema.safeParse(data).success).toBe(false);
});
it('should invalidate an invalid password', () => {
const data = {
email: validEmail,
password: invalidPasswordShort,
company: validCompanyName,
};
expect(registerSchema.safeParse(data).success).toBe(false);
});
it('should invalidate an invalid company name', () => {
const data = {
email: validEmail,
password: validPassword,
company: invalidCompanyNameEmpty,
};
expect(registerSchema.safeParse(data).success).toBe(false);
});
});
describe('loginSchema', () => {
it('should validate a valid login object', () => {
const data = {
email: validEmail,
password: validPassword,
};
expect(loginSchema.safeParse(data).success).toBe(true);
});
it('should invalidate an invalid email', () => {
const data = {
email: invalidEmailFormat,
password: validPassword,
};
expect(loginSchema.safeParse(data).success).toBe(false);
});
it('should invalidate an empty password', () => {
const data = {
email: validEmail,
password: '',
};
expect(loginSchema.safeParse(data).success).toBe(false);
});
});
describe('forgotPasswordSchema', () => {
it('should validate a valid email', () => {
const data = { email: validEmail };
expect(forgotPasswordSchema.safeParse(data).success).toBe(true);
});
it('should invalidate an invalid email', () => {
const data = { email: invalidEmailFormat };
expect(forgotPasswordSchema.safeParse(data).success).toBe(false);
});
});
describe('resetPasswordSchema', () => {
it('should validate a valid reset password object', () => {
const data = {
token: 'some-valid-token',
password: validPassword,
};
expect(resetPasswordSchema.safeParse(data).success).toBe(true);
});
it('should invalidate an empty token', () => {
const data = {
token: '',
password: validPassword,
};
expect(resetPasswordSchema.safeParse(data).success).toBe(false);
});
it('should invalidate an invalid password', () => {
const data = {
token: 'some-valid-token',
password: invalidPasswordShort,
};
expect(resetPasswordSchema.safeParse(data).success).toBe(false);
});
});
describe('sessionFilterSchema', () => {
it('should validate a valid session filter object', () => {
const data = {
search: 'query',
sentiment: 'POSITIVE',
category: 'SCHEDULE_HOURS',
startDate: '2023-01-01T00:00:00Z',
endDate: '2023-01-31T23:59:59Z',
page: 1,
limit: 20,
};
expect(sessionFilterSchema.safeParse(data).success).toBe(true);
});
it('should validate with only optional fields', () => {
const data = {};
expect(sessionFilterSchema.safeParse(data).success).toBe(true);
});
it('should invalidate an invalid sentiment', () => {
const data = { sentiment: 'INVALID' };
expect(sessionFilterSchema.safeParse(data).success).toBe(false);
});
it('should invalidate an invalid category', () => {
const data = { category: 'INVALID_CATEGORY' };
expect(sessionFilterSchema.safeParse(data).success).toBe(false);
});
it('should invalidate an invalid date format', () => {
const data = { startDate: '2023-01-01' }; // Missing time
expect(sessionFilterSchema.safeParse(data).success).toBe(false);
});
it('should invalidate page less than 1', () => {
const data = { page: 0 };
expect(sessionFilterSchema.safeParse(data).success).toBe(false);
});
it('should invalidate limit greater than 100', () => {
const data = { limit: 101 };
expect(sessionFilterSchema.safeParse(data).success).toBe(false);
});
});
describe('companySettingsSchema', () => {
it('should validate a valid company settings object', () => {
const data = {
name: validCompanyName,
csvUrl: 'http://example.com/data.csv',
csvUsername: 'user',
csvPassword: 'password',
sentimentAlert: 0.5,
dashboardOpts: { theme: 'dark' },
};
expect(companySettingsSchema.safeParse(data).success).toBe(true);
});
it('should invalidate an invalid CSV URL', () => {
const data = {
name: validCompanyName,
csvUrl: 'invalid-url',
};
expect(companySettingsSchema.safeParse(data).success).toBe(false);
});
it('should invalidate an invalid company name', () => {
const data = {
name: invalidCompanyNameEmpty,
csvUrl: 'http://example.com/data.csv',
};
expect(companySettingsSchema.safeParse(data).success).toBe(false);
});
it('should invalidate sentimentAlert out of range', () => {
const data = {
name: validCompanyName,
csvUrl: 'http://example.com/data.csv',
sentimentAlert: 1.1,
};
expect(companySettingsSchema.safeParse(data).success).toBe(false);
});
});
describe('userUpdateSchema', () => {
it('should validate a valid user update object with all fields', () => {
const data = {
email: validEmail,
role: 'ADMIN',
password: validPassword,
};
expect(userUpdateSchema.safeParse(data).success).toBe(true);
});
it('should validate a valid user update object with only email', () => {
const data = { email: validEmail };
expect(userUpdateSchema.safeParse(data).success).toBe(true);
});
it('should validate a valid user update object with only role', () => {
const data = { role: 'USER' };
expect(userUpdateSchema.safeParse(data).success).toBe(true);
});
it('should validate a valid user update object with only password', () => {
const data = { password: validPassword };
expect(userUpdateSchema.safeParse(data).success).toBe(true);
});
it('should invalidate an invalid email', () => {
const data = { email: invalidEmailFormat };
expect(userUpdateSchema.safeParse(data).success).toBe(false);
});
it('should invalidate an invalid role', () => {
const data = { role: 'SUPERUSER' };
expect(userUpdateSchema.safeParse(data).success).toBe(false);
});
it('should invalidate an invalid password', () => {
const data = { password: invalidPasswordShort };
expect(userUpdateSchema.safeParse(data).success).toBe(false);
});
});
describe('metricsQuerySchema', () => {
it('should validate a valid metrics query object', () => {
const data = {
startDate: '2023-01-01T00:00:00Z',
endDate: '2023-01-31T23:59:59Z',
companyId: 'a1b2c3d4-e5f6-7890-1234-567890abcdef',
};
expect(metricsQuerySchema.safeParse(data).success).toBe(true);
});
it('should validate with only optional fields', () => {
const data = {};
expect(metricsQuerySchema.safeParse(data).success).toBe(true);
});
it('should invalidate an invalid date format', () => {
const data = { startDate: '2023-01-01' };
expect(metricsQuerySchema.safeParse(data).success).toBe(false);
});
it('should invalidate an invalid companyId format', () => {
const data = { companyId: 'invalid-uuid' };
expect(metricsQuerySchema.safeParse(data).success).toBe(false);
});
});
describe('validateInput', () => {
const testSchema = registerSchema; // Using registerSchema for validateInput tests
it('should return success true and data for valid input', () => {
const data = {
email: validEmail,
password: validPassword,
company: validCompanyName,
};
const result = validateInput(testSchema, data);
expect(result.success).toBe(true);
expect((result as any).data).toEqual(data);
});
it('should return success false and errors for invalid input', () => {
const data = {
email: invalidEmailFormat,
password: invalidPasswordShort,
company: invalidCompanyNameEmpty,
};
const result = validateInput(testSchema, data);
expect(result.success).toBe(false);
expect((result as any).errors).toEqual(expect.arrayContaining([
'email: Invalid email format',
'password: Password must be at least 12 characters long',
'company: Company name is required',
]));
});
it('should handle non-ZodError errors gracefully', () => {
const mockSchema = {
parse: () => { throw new Error('Some unexpected error'); }
} as any;
const result = validateInput(mockSchema, {});
expect(result.success).toBe(false);
expect((result as any).errors).toEqual(['Invalid input']);
});
});
});