refactor: fix biome linting issues and update project documentation

- Fix 36+ biome linting issues, reducing errors/warnings from 227 to 191
- Replace explicit 'any' types with proper TypeScript interfaces
- Fix React hooks dependencies and useCallback patterns (both fixes are illustrated in the sketch after this list)
- Resolve unused variables and parameter assignment issues
- Improve accessibility with proper label associations
- Add comprehensive API documentation for admin and security features
- Update README.md with accurate PostgreSQL setup and current tech stack
- Create complete documentation for audit logging, CSP monitoring, and batch processing
- Fix outdated project information and document missing developer workflows
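
A minimal, hypothetical before/after sketch of the 'any' and useCallback patterns called out above (the component, prop, and field names are illustrative and not taken from this commit); it shows an explicit 'any' replaced with a small interface and a useCallback dependency array completed, the kind of change Biome's noExplicitAny and useExhaustiveDependencies rules ask for:

// Hypothetical example, not from this repository.
import { useCallback } from "react";

// Before: ({ session }: { session: any }), which Biome flags as an explicit 'any'.
// After: an interface documents the shape the component actually relies on.
interface SessionSummary {
  id: string;
  startTime: string;
}

interface SessionRowProps {
  session: SessionSummary;
  onSelect: (sessionId: string) => void;
}

export function SessionRow({ session, onSelect }: SessionRowProps) {
  // Before: useCallback(() => onSelect(session.id), []), where the empty
  // dependency array hides the dependencies on onSelect and session.id.
  const handleClick = useCallback(() => {
    onSelect(session.id);
  }, [onSelect, session.id]);

  return (
    <button type="button" onClick={handleClick}>
      {session.startTime}
    </button>
  );
}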
Committed by Kaj Kowalski on 2025-07-11 21:50:53 +02:00
parent 3e9e75e854
commit 1eea2cc3e4
121 changed files with 28687 additions and 4895 deletions


@@ -34,7 +34,10 @@ export class DatabaseValidator {
};
try {
migrationLogger.startStep("DATABASE_VALIDATION", "Running comprehensive database validation");
migrationLogger.startStep(
"DATABASE_VALIDATION",
"Running comprehensive database validation"
);
// Test database connection
await this.validateConnection(result);
@@ -62,13 +65,21 @@ export class DatabaseValidator {
if (result.success) {
migrationLogger.completeStep("DATABASE_VALIDATION");
} else {
migrationLogger.failStep("DATABASE_VALIDATION", new Error(`Validation failed with ${result.errors.length} errors`));
migrationLogger.failStep(
"DATABASE_VALIDATION",
new Error(`Validation failed with ${result.errors.length} errors`)
);
}
} catch (error) {
result.success = false;
result.errors.push(`Database validation failed: ${(error as Error).message}`);
migrationLogger.error("DATABASE_VALIDATION", "Critical validation error", error as Error);
result.errors.push(
`Database validation failed: ${(error as Error).message}`
);
migrationLogger.error(
"DATABASE_VALIDATION",
"Critical validation error",
error as Error
);
} finally {
await this.prisma.$disconnect();
}
@@ -82,34 +93,54 @@ export class DatabaseValidator {
await this.prisma.$queryRaw`SELECT 1`;
migrationLogger.info("DB_CONNECTION", "Database connection successful");
} catch (error) {
result.errors.push(`Database connection failed: ${(error as Error).message}`);
result.errors.push(
`Database connection failed: ${(error as Error).message}`
);
}
}
private async validateSchemaIntegrity(result: ValidationResult): Promise<void> {
private async validateSchemaIntegrity(
result: ValidationResult
): Promise<void> {
migrationLogger.info("SCHEMA_VALIDATION", "Validating schema integrity");
try {
// Check if all required tables exist
const requiredTables = [
'Company', 'User', 'Session', 'SessionImport', 'Message',
'SessionProcessingStatus', 'Question', 'SessionQuestion',
'AIBatchRequest', 'AIProcessingRequest', 'AIModel',
'AIModelPricing', 'CompanyAIModel', 'PlatformUser'
"Company",
"User",
"Session",
"SessionImport",
"Message",
"SessionProcessingStatus",
"Question",
"SessionQuestion",
"AIBatchRequest",
"AIProcessingRequest",
"AIModel",
"AIModelPricing",
"CompanyAIModel",
"PlatformUser",
];
for (const table of requiredTables) {
try {
await this.prisma.$queryRawUnsafe(`SELECT 1 FROM "${table}" LIMIT 1`);
} catch (error) {
result.errors.push(`Required table missing or inaccessible: ${table}`);
result.errors.push(
`Required table missing or inaccessible: ${table}`
);
}
}
// Check for required enums
const requiredEnums = [
'ProcessingStage', 'ProcessingStatus', 'AIBatchRequestStatus',
'AIRequestStatus', 'SentimentCategory', 'SessionCategory'
"ProcessingStage",
"ProcessingStatus",
"AIBatchRequestStatus",
"AIRequestStatus",
"SentimentCategory",
"SessionCategory",
];
for (const enumName of requiredEnums) {
@@ -124,9 +155,10 @@ export class DatabaseValidator {
result.errors.push(`Required enum missing: ${enumName}`);
}
}
} catch (error) {
result.errors.push(`Schema validation failed: ${(error as Error).message}`);
result.errors.push(
`Schema validation failed: ${(error as Error).message}`
);
}
}
@@ -135,7 +167,7 @@ export class DatabaseValidator {
try {
// Check for orphaned records
const orphanedSessions = await this.prisma.$queryRaw<{count: bigint}[]>`
const orphanedSessions = await this.prisma.$queryRaw<{ count: bigint }[]>`
SELECT COUNT(*) as count
FROM "Session" s
LEFT JOIN "Company" c ON s."companyId" = c.id
@@ -143,11 +175,15 @@ export class DatabaseValidator {
`;
if (orphanedSessions[0]?.count > 0) {
result.errors.push(`Found ${orphanedSessions[0].count} orphaned sessions`);
result.errors.push(
`Found ${orphanedSessions[0].count} orphaned sessions`
);
}
// Check for sessions without processing status
const sessionsWithoutStatus = await this.prisma.$queryRaw<{count: bigint}[]>`
const sessionsWithoutStatus = await this.prisma.$queryRaw<
{ count: bigint }[]
>`
SELECT COUNT(*) as count
FROM "Session" s
LEFT JOIN "SessionProcessingStatus" sps ON s.id = sps."sessionId"
@@ -155,11 +191,15 @@ export class DatabaseValidator {
`;
if (sessionsWithoutStatus[0]?.count > 0) {
result.warnings.push(`Found ${sessionsWithoutStatus[0].count} sessions without processing status`);
result.warnings.push(
`Found ${sessionsWithoutStatus[0].count} sessions without processing status`
);
}
// Check for inconsistent batch processing states
const inconsistentBatchStates = await this.prisma.$queryRaw<{count: bigint}[]>`
const inconsistentBatchStates = await this.prisma.$queryRaw<
{ count: bigint }[]
>`
SELECT COUNT(*) as count
FROM "AIProcessingRequest" apr
WHERE apr."batchId" IS NOT NULL
@@ -167,11 +207,14 @@ export class DatabaseValidator {
`;
if (inconsistentBatchStates[0]?.count > 0) {
result.warnings.push(`Found ${inconsistentBatchStates[0].count} requests with inconsistent batch states`);
result.warnings.push(
`Found ${inconsistentBatchStates[0].count} requests with inconsistent batch states`
);
}
} catch (error) {
result.errors.push(`Data integrity validation failed: ${(error as Error).message}`);
result.errors.push(
`Data integrity validation failed: ${(error as Error).message}`
);
}
}
@@ -181,71 +224,91 @@ export class DatabaseValidator {
try {
// Check for missing critical indexes
const criticalIndexes = [
{ table: 'Session', columns: ['companyId', 'startTime'] },
{ table: 'SessionProcessingStatus', columns: ['stage', 'status'] },
{ table: 'AIProcessingRequest', columns: ['processingStatus'] },
{ table: 'AIBatchRequest', columns: ['companyId', 'status'] },
{ table: "Session", columns: ["companyId", "startTime"] },
{ table: "SessionProcessingStatus", columns: ["stage", "status"] },
{ table: "AIProcessingRequest", columns: ["processingStatus"] },
{ table: "AIBatchRequest", columns: ["companyId", "status"] },
];
for (const indexInfo of criticalIndexes) {
const indexExists = await this.prisma.$queryRawUnsafe(`
const indexExists = (await this.prisma.$queryRawUnsafe(`
SELECT COUNT(*) as count
FROM pg_indexes
WHERE tablename = '${indexInfo.table}'
AND indexdef LIKE '%${indexInfo.columns.join('%')}%'
`) as {count: string}[];
AND indexdef LIKE '%${indexInfo.columns.join("%")}%'
`)) as { count: string }[];
if (parseInt(indexExists[0]?.count || '0') === 0) {
result.warnings.push(`Missing recommended index on ${indexInfo.table}(${indexInfo.columns.join(', ')})`);
if (parseInt(indexExists[0]?.count || "0") === 0) {
result.warnings.push(
`Missing recommended index on ${indexInfo.table}(${indexInfo.columns.join(", ")})`
);
}
}
} catch (error) {
result.warnings.push(`Index validation failed: ${(error as Error).message}`);
result.warnings.push(
`Index validation failed: ${(error as Error).message}`
);
}
}
private async validateBatchProcessingReadiness(result: ValidationResult): Promise<void> {
migrationLogger.info("BATCH_READINESS", "Validating batch processing readiness");
private async validateBatchProcessingReadiness(
result: ValidationResult
): Promise<void> {
migrationLogger.info(
"BATCH_READINESS",
"Validating batch processing readiness"
);
try {
// Check if AIBatchRequest table is properly configured
const batchTableCheck = await this.prisma.$queryRaw<{count: bigint}[]>`
const batchTableCheck = await this.prisma.$queryRaw<{ count: bigint }[]>`
SELECT COUNT(*) as count FROM "AIBatchRequest"
`;
// Check if AIProcessingRequest has batch-related fields
const batchFieldsCheck = await this.prisma.$queryRawUnsafe(`
const batchFieldsCheck = (await this.prisma.$queryRawUnsafe(`
SELECT column_name
FROM information_schema.columns
WHERE table_name = 'AIProcessingRequest'
AND column_name IN ('processingStatus', 'batchId')
`) as {column_name: string}[];
`)) as { column_name: string }[];
if (batchFieldsCheck.length < 2) {
result.errors.push("AIProcessingRequest table missing batch processing fields");
result.errors.push(
"AIProcessingRequest table missing batch processing fields"
);
}
// Check if batch status enum values are correct
const batchStatusValues = await this.prisma.$queryRawUnsafe(`
const batchStatusValues = (await this.prisma.$queryRawUnsafe(`
SELECT unnest(enum_range(NULL::AIBatchRequestStatus)) as value
`) as {value: string}[];
`)) as { value: string }[];
const requiredBatchStatuses = [
'PENDING', 'UPLOADING', 'VALIDATING', 'IN_PROGRESS',
'FINALIZING', 'COMPLETED', 'PROCESSED', 'FAILED', 'CANCELLED'
"PENDING",
"UPLOADING",
"VALIDATING",
"IN_PROGRESS",
"FINALIZING",
"COMPLETED",
"PROCESSED",
"FAILED",
"CANCELLED",
];
const missingStatuses = requiredBatchStatuses.filter(
status => !batchStatusValues.some(v => v.value === status)
(status) => !batchStatusValues.some((v) => v.value === status)
);
if (missingStatuses.length > 0) {
result.errors.push(`Missing batch status values: ${missingStatuses.join(', ')}`);
result.errors.push(
`Missing batch status values: ${missingStatuses.join(", ")}`
);
}
} catch (error) {
result.errors.push(`Batch processing readiness validation failed: ${(error as Error).message}`);
result.errors.push(
`Batch processing readiness validation failed: ${(error as Error).message}`
);
}
}
@@ -265,14 +328,16 @@ export class DatabaseValidator {
try {
await test();
} catch (error) {
result.warnings.push(`Prisma model access issue: ${(error as Error).message}`);
result.warnings.push(
`Prisma model access issue: ${(error as Error).message}`
);
}
}
// Test complex queries that tRPC will use
try {
await this.prisma.session.findMany({
where: { companyId: 'test' },
where: { companyId: "test" },
include: {
messages: true,
processingStatus: true,
@@ -281,13 +346,16 @@ export class DatabaseValidator {
});
} catch (error) {
// This is expected to fail with the test companyId, but should not error on structure
if (!(error as Error).message.includes('test')) {
result.warnings.push(`Complex query structure issue: ${(error as Error).message}`);
if (!(error as Error).message.includes("test")) {
result.warnings.push(
`Complex query structure issue: ${(error as Error).message}`
);
}
}
} catch (error) {
result.warnings.push(`tRPC readiness validation failed: ${(error as Error).message}`);
result.warnings.push(
`tRPC readiness validation failed: ${(error as Error).message}`
);
}
}
@@ -301,7 +369,8 @@ export class DatabaseValidator {
const sessionsCount = await this.prisma.session.count();
const messagesCount = await this.prisma.message.count();
const batchRequestsCount = await this.prisma.aIBatchRequest.count();
const processingRequestsCount = await this.prisma.aIProcessingRequest.count();
const processingRequestsCount =
await this.prisma.aIProcessingRequest.count();
result.metrics = {
companies: companiesCount,
@@ -313,27 +382,31 @@ export class DatabaseValidator {
};
// Check processing status distribution
const processingStatusCounts = await this.prisma.sessionProcessingStatus.groupBy({
by: ['status'],
_count: { status: true },
});
const processingStatusCounts =
await this.prisma.sessionProcessingStatus.groupBy({
by: ["status"],
_count: { status: true },
});
for (const statusCount of processingStatusCounts) {
result.metrics[`processing_${statusCount.status.toLowerCase()}`] = statusCount._count.status;
result.metrics[`processing_${statusCount.status.toLowerCase()}`] =
statusCount._count.status;
}
// Check batch request status distribution
const batchStatusCounts = await this.prisma.aIBatchRequest.groupBy({
by: ['status'],
by: ["status"],
_count: { status: true },
});
for (const statusCount of batchStatusCounts) {
result.metrics[`batch_${statusCount.status.toLowerCase()}`] = statusCount._count.status;
result.metrics[`batch_${statusCount.status.toLowerCase()}`] =
statusCount._count.status;
}
} catch (error) {
result.warnings.push(`Metrics collection failed: ${(error as Error).message}`);
result.warnings.push(
`Metrics collection failed: ${(error as Error).message}`
);
}
}
}
@@ -342,22 +415,23 @@ export class DatabaseValidator {
if (import.meta.url === `file://${process.argv[1]}`) {
const validator = new DatabaseValidator();
validator.validateDatabase()
validator
.validateDatabase()
.then((result) => {
console.log('\n=== DATABASE VALIDATION RESULTS ===');
console.log(`Success: ${result.success ? '✅' : '❌'}`);
console.log("\n=== DATABASE VALIDATION RESULTS ===");
console.log(`Success: ${result.success ? "✅" : "❌"}`);
if (result.errors.length > 0) {
console.log('\n❌ ERRORS:');
result.errors.forEach(error => console.log(` - ${error}`));
console.log("\n❌ ERRORS:");
result.errors.forEach((error) => console.log(` - ${error}`));
}
if (result.warnings.length > 0) {
console.log('\n⚠ WARNINGS:');
result.warnings.forEach(warning => console.log(` - ${warning}`));
console.log("\n⚠ WARNINGS:");
result.warnings.forEach((warning) => console.log(` - ${warning}`));
}
console.log('\n📊 METRICS:');
console.log("\n📊 METRICS:");
Object.entries(result.metrics).forEach(([key, value]) => {
console.log(` ${key}: ${value}`);
});
@@ -365,7 +439,7 @@ if (import.meta.url === `file://${process.argv[1]}`) {
process.exit(result.success ? 0 : 1);
})
.catch((error) => {
console.error('Validation failed:', error);
console.error("Validation failed:", error);
process.exit(1);
});
}
}