Mirror of https://github.com/kjanat/livedash-node.git, synced 2026-01-16 15:32:10 +01:00
Security Enhancements:
- Implemented proper rate limiting with automatic cleanup for the /register and /forgot-password endpoints (see the sketch after this list)
- Added memory usage protection with a MAX_ENTRIES limit (10000)
- Fixed rate limiter memory leaks by adding cleanup intervals
- Improved IP extraction with x-real-ip and x-client-ip header support

Code Quality Improvements:
- Refactored ProcessingStatusManager from individual functions to a class-based architecture
- Maintained backward compatibility with a singleton instance pattern
- Fixed TypeScript strict mode violations across the codebase
- Resolved all build errors and type mismatches

UI Component Fixes:
- Removed unused chart components (Charts.tsx, DonutChart.tsx)
- Fixed calendar component type issues by removing unused custom implementations
- Resolved theme provider type imports
- Fixed confetti component default options handling
- Corrected pointer component coordinate type definitions

Type System Improvements:
- Extended NextAuth types to support dual auth systems (regular and platform users)
- Fixed nullable type handling throughout the codebase
- Resolved Prisma JSON field type compatibility issues
- Corrected SessionMessage and ImportRecord interface definitions
- Fixed ES2015 iteration compatibility issues

Database & Performance:
- Updated database pool configuration for Prisma adapter compatibility
- Fixed pagination response structure in user management endpoints
- Improved error handling with proper error class usage

Testing & Build:
- All TypeScript compilation errors resolved
- ESLint warnings remain, but no errors
- Build completes successfully with proper static generation
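The rate limiter itself is not part of the file shown below. What follows is a minimal sketch, assuming a per-IP in-memory store, of the pattern the commit message describes: a bounded map (MAX_ENTRIES) plus a periodic cleanup interval so expired entries cannot leak memory. The names (RateLimitEntry, isAllowed) and the window/limit values are illustrative assumptions, not the project's actual API.

// Hypothetical sketch of the rate-limiting pattern described above:
// an in-memory, per-IP counter with a MAX_ENTRIES cap and periodic cleanup.
// Names, window size, and request limit are illustrative assumptions.

interface RateLimitEntry {
  count: number;
  windowStart: number;
}

const MAX_ENTRIES = 10_000; // memory usage protection
const WINDOW_MS = 15 * 60 * 1000; // assumed 15-minute window
const MAX_REQUESTS = 5; // assumed requests allowed per window

const entries = new Map<string, RateLimitEntry>();

// Cleanup interval: drop expired entries so the map cannot grow without bound.
setInterval(() => {
  const now = Date.now();
  for (const [ip, entry] of entries) {
    if (now - entry.windowStart > WINDOW_MS) {
      entries.delete(ip);
    }
  }
}, WINDOW_MS);

export function isAllowed(ip: string): boolean {
  const now = Date.now();
  const entry = entries.get(ip);

  // New IP or expired window: start a fresh window.
  if (!entry || now - entry.windowStart > WINDOW_MS) {
    // Fail closed once the cap is reached rather than growing unbounded.
    if (!entries.has(ip) && entries.size >= MAX_ENTRIES) return false;
    entries.set(ip, { count: 1, windowStart: now });
    return true;
  }

  entry.count += 1;
  return entry.count <= MAX_REQUESTS;
}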
186 lines · 5.1 KiB · TypeScript
// Centralized environment variable management
import { readFileSync } from "node:fs";
import { dirname, join } from "node:path";
import { fileURLToPath } from "node:url";

/**
 * Parse environment variable value by removing quotes, comments, and trimming whitespace
 */
function parseEnvValue(value: string | undefined): string {
  if (!value) return "";

  // Trim whitespace
  let cleaned = value.trim();

  // Remove inline comments (everything after #)
  const commentIndex = cleaned.indexOf("#");
  if (commentIndex !== -1) {
    cleaned = cleaned.substring(0, commentIndex).trim();
  }

  // Remove surrounding quotes (both single and double)
  if (
    (cleaned.startsWith('"') && cleaned.endsWith('"')) ||
    (cleaned.startsWith("'") && cleaned.endsWith("'"))
  ) {
    cleaned = cleaned.slice(1, -1);
  }

  return cleaned;
}

/**
 * Parse integer with fallback to default value
 */
function parseIntWithDefault(
  value: string | undefined,
  defaultValue: number
): number {
  const cleaned = parseEnvValue(value);
  if (!cleaned) return defaultValue;

  const parsed = Number.parseInt(cleaned, 10);
  return Number.isNaN(parsed) ? defaultValue : parsed;
}

// Load environment variables from .env.local
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
const envPath = join(__dirname, "..", ".env.local");

// Load .env.local if it exists
try {
  const envFile = readFileSync(envPath, "utf8");
  const envVars = envFile
    .split("\n")
    .filter((line) => line.trim() && !line.startsWith("#"));

  envVars.forEach((line) => {
    const [key, ...valueParts] = line.split("=");
    if (key && valueParts.length > 0) {
      const rawValue = valueParts.join("=");
      const cleanedValue = parseEnvValue(rawValue);
      if (!process.env[key.trim()]) {
        process.env[key.trim()] = cleanedValue;
      }
    }
  });
} catch (_error) {
  // Silently fail if .env.local doesn't exist
}

/**
 * Typed environment variables with defaults
 */
export const env = {
  // NextAuth
  NEXTAUTH_URL:
    parseEnvValue(process.env.NEXTAUTH_URL) || "http://localhost:3000",
  NEXTAUTH_SECRET: parseEnvValue(process.env.NEXTAUTH_SECRET) || "",
  NODE_ENV: parseEnvValue(process.env.NODE_ENV) || "development",

  // OpenAI
  OPENAI_API_KEY: parseEnvValue(process.env.OPENAI_API_KEY) || "",

  // Scheduler Configuration
  SCHEDULER_ENABLED: parseEnvValue(process.env.SCHEDULER_ENABLED) === "true",
  CSV_IMPORT_INTERVAL:
    parseEnvValue(process.env.CSV_IMPORT_INTERVAL) || "*/15 * * * *",
  IMPORT_PROCESSING_INTERVAL:
    parseEnvValue(process.env.IMPORT_PROCESSING_INTERVAL) || "*/5 * * * *",
  IMPORT_PROCESSING_BATCH_SIZE: parseIntWithDefault(
    process.env.IMPORT_PROCESSING_BATCH_SIZE,
    50
  ),
  SESSION_PROCESSING_INTERVAL:
    parseEnvValue(process.env.SESSION_PROCESSING_INTERVAL) || "0 * * * *",
  SESSION_PROCESSING_BATCH_SIZE: parseIntWithDefault(
    process.env.SESSION_PROCESSING_BATCH_SIZE,
    0
  ),
  SESSION_PROCESSING_CONCURRENCY: parseIntWithDefault(
    process.env.SESSION_PROCESSING_CONCURRENCY,
    5
  ),

  // Database Configuration
  DATABASE_URL: parseEnvValue(process.env.DATABASE_URL) || "",
  DATABASE_URL_DIRECT: parseEnvValue(process.env.DATABASE_URL_DIRECT) || "",

  // Database Connection Pooling
  DATABASE_CONNECTION_LIMIT: parseIntWithDefault(
    process.env.DATABASE_CONNECTION_LIMIT,
    20
  ),
  DATABASE_POOL_TIMEOUT: parseIntWithDefault(
    process.env.DATABASE_POOL_TIMEOUT,
    10
  ),

  // Server
  PORT: parseIntWithDefault(process.env.PORT, 3000),
} as const;

/**
 * Validate required environment variables
 */
export function validateEnv(): { valid: boolean; errors: string[] } {
  const errors: string[] = [];

  if (!env.DATABASE_URL) {
    errors.push("DATABASE_URL is required");
  }

  if (!env.NEXTAUTH_SECRET) {
    errors.push("NEXTAUTH_SECRET is required");
  }

  if (!env.OPENAI_API_KEY && env.NODE_ENV === "production") {
    errors.push("OPENAI_API_KEY is required in production");
  }

  return {
    valid: errors.length === 0,
    errors,
  };
}

/**
 * Get scheduler configuration from environment variables
 */
export function getSchedulerConfig() {
  return {
    enabled: env.SCHEDULER_ENABLED,
    csvImport: {
      interval: env.CSV_IMPORT_INTERVAL,
    },
    importProcessing: {
      interval: env.IMPORT_PROCESSING_INTERVAL,
      batchSize: env.IMPORT_PROCESSING_BATCH_SIZE,
    },
    sessionProcessing: {
      interval: env.SESSION_PROCESSING_INTERVAL,
      batchSize: env.SESSION_PROCESSING_BATCH_SIZE,
      concurrency: env.SESSION_PROCESSING_CONCURRENCY,
    },
  };
}

/**
 * Log environment configuration (safe for production)
 */
export function logEnvConfig(): void {
  console.log("[Environment] Configuration:");
  console.log(`  NODE_ENV: ${env.NODE_ENV}`);
  console.log(`  NEXTAUTH_URL: ${env.NEXTAUTH_URL}`);
  console.log(`  SCHEDULER_ENABLED: ${env.SCHEDULER_ENABLED}`);
  console.log(`  PORT: ${env.PORT}`);

  if (env.SCHEDULER_ENABLED) {
    console.log("  Scheduler intervals:");
    console.log(`    CSV Import: ${env.CSV_IMPORT_INTERVAL}`);
    console.log(`    Import Processing: ${env.IMPORT_PROCESSING_INTERVAL}`);
    console.log(`    Session Processing: ${env.SESSION_PROCESSING_INTERVAL}`);
  }
}
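Since env, validateEnv, getSchedulerConfig, and logEnvConfig are all exported, the typical consumer is a server entrypoint that validates configuration before starting anything else. The snippet below is a minimal usage sketch; the import path ./lib/env and the commented-out scheduler bootstrap are assumptions for illustration.

// Minimal usage sketch (hypothetical server entrypoint). Only env, validateEnv,
// getSchedulerConfig, and logEnvConfig come from the module above; the import
// path and the scheduler bootstrap comment are assumptions.
import { env, getSchedulerConfig, logEnvConfig, validateEnv } from "./lib/env";

const { valid, errors } = validateEnv();
if (!valid) {
  console.error("[Environment] Invalid configuration:");
  for (const error of errors) {
    console.error(`  - ${error}`);
  }
  process.exit(1);
}

logEnvConfig();

const schedulerConfig = getSchedulerConfig();
if (schedulerConfig.enabled) {
  // startSchedulers(schedulerConfig); // hypothetical bootstrap for the cron jobs
  console.log(`[Scheduler] CSV import cron: ${schedulerConfig.csvImport.interval}`);
}

console.log(`[Server] Starting on port ${env.PORT}`);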