fix: resolve Biome lint errors

This commit is contained in:
2025-07-13 20:12:17 +02:00
parent 42ad5b7c80
commit 6114e80e98
23 changed files with 7589 additions and 4180 deletions

View File

@ -280,6 +280,84 @@ function addCORSHeaders(
}
}
/**
 * Run the authentication and authorization checks requested by `options`.
 *
 * Authentication runs first (when `requireAuth` is set); role / platform
 * access checks only run when at least one of them is configured. The
 * underlying validators throw on failure, so returning normally means the
 * request is allowed through.
 */
async function processAuthAndAuthz(
  context: APIContext,
  options: APIHandlerOptions
): Promise<void> {
  const { requireAuth, requiredRole, requirePlatformAccess } = options;

  if (requireAuth) {
    await validateAuthentication(context);
  }

  const needsAuthorization =
    Boolean(requiredRole) || Boolean(requirePlatformAccess);
  if (needsAuthorization) {
    await validateAuthorization(context, options);
  }
}
/**
 * Validate the request body and query string according to the handler options.
 *
 * Body validation is skipped for GET requests (no body to validate); query
 * validation runs whenever a query schema is configured. Either field stays
 * `undefined` when the corresponding validation was not requested.
 */
async function processValidation(
  request: NextRequest,
  options: APIHandlerOptions
): Promise<{ validatedData: unknown; validatedQuery: unknown }> {
  const validatedData: unknown =
    options.validateInput && request.method !== "GET"
      ? await validateInput(request, options.validateInput)
      : undefined;

  const validatedQuery: unknown = options.validateQuery
    ? validateQuery(request, options.validateQuery)
    : undefined;

  return { validatedData, validatedQuery };
}
/**
 * Build the JSON success response for a completed handler invocation.
 *
 * Attaches the request id (for tracing), an optional Cache-Control header,
 * and the configured CORS headers before handing the response back.
 */
function createAPIResponse<T>(
  result: T,
  context: APIContext,
  options: APIHandlerOptions
): NextResponse {
  const body = createSuccessResponse(result, { requestId: context.requestId });
  const response = NextResponse.json(body);

  const { headers } = response;
  headers.set("X-Request-ID", context.requestId);
  if (options.cacheControl) {
    headers.set("Cache-Control", options.cacheControl);
  }

  addCORSHeaders(response, options);
  return response;
}
/**
 * Invoke the route handler and, when audit logging is enabled, record a
 * "success" access entry for the request URL.
 *
 * The audit entry is only written after the handler resolves; a throwing
 * handler skips it (error auditing is handled by the caller's error path).
 */
async function executeWithAudit<T>(
  handler: APIHandler<T>,
  context: APIContext,
  validatedData: unknown,
  validatedQuery: unknown,
  request: NextRequest,
  options: APIHandlerOptions
): Promise<T> {
  const result = await handler(context, validatedData, validatedQuery);

  if (!options.auditLog) {
    return result;
  }

  await logAPIAccess(context, "success", request.url);
  return result;
}
/**
* Main API handler factory
*/
@ -291,64 +369,32 @@ export function createAPIHandler<T = unknown>(
let context: APIContext | undefined;
try {
// 1. Create request context
context = await createAPIContext(request);
// 2. Apply rate limiting
if (options.rateLimit) {
await applyRateLimit(context, options.rateLimit);
}
// 3. Validate authentication
if (options.requireAuth) {
await validateAuthentication(context);
}
await processAuthAndAuthz(context, options);
// 4. Validate authorization
if (options.requiredRole || options.requirePlatformAccess) {
await validateAuthorization(context, options);
}
// 5. Validate input
let validatedData;
if (options.validateInput && request.method !== "GET") {
validatedData = await validateInput(request, options.validateInput);
}
// 6. Validate query parameters
let validatedQuery;
if (options.validateQuery) {
validatedQuery = validateQuery(request, options.validateQuery);
}
// 7. Execute handler
const result = await handler(context, validatedData, validatedQuery);
// 8. Audit logging
if (options.auditLog) {
await logAPIAccess(context, "success", request.url);
}
// 9. Create response
const response = NextResponse.json(
createSuccessResponse(result, { requestId: context.requestId })
const { validatedData, validatedQuery } = await processValidation(
request,
options
);
// 10. Add headers
response.headers.set("X-Request-ID", context.requestId);
const result = await executeWithAudit(
handler,
context,
validatedData,
validatedQuery,
request,
options
);
if (options.cacheControl) {
response.headers.set("Cache-Control", options.cacheControl);
}
addCORSHeaders(response, options);
return response;
return createAPIResponse(result, context, options);
} catch (error) {
// Handle errors consistently
const requestId = context?.requestId || crypto.randomUUID();
// Log failed requests
if (options.auditLog && context) {
await logAPIAccess(context, "error", request.url, error as Error);
}

View File

@ -137,7 +137,7 @@ export function stopOptimizedBatchScheduler(): void {
{ task: retryFailedTask, name: "retryFailedTask" },
];
for (const { task, name } of tasks) {
for (const { task } of tasks) {
if (task) {
task.stop();
task.destroy();

View File

@ -169,6 +169,10 @@ const ConfigSchema = z.object({
export type AppConfig = z.infer<typeof ConfigSchema>;
/**
 * Recursively marks every property of T as optional (a deep `Partial<T>`).
 * NOTE(review): arrays and functions also satisfy `extends object`, so they
 * are recursed into as plain objects — confirm that is acceptable here.
 */
type DeepPartial<T> = {
[P in keyof T]?: T[P] extends object ? DeepPartial<T[P]> : T[P];
};
/**
* Configuration provider class
*/
@ -230,8 +234,8 @@ class ConfigProvider {
/**
* Get environment-specific configuration
*/
forEnvironment(env: Environment): Partial<AppConfig> {
const overrides: Record<Environment, any> = {
forEnvironment(env: Environment): DeepPartial<AppConfig> {
const overrides: Record<Environment, DeepPartial<AppConfig>> = {
development: {
app: {
logLevel: "debug",
@ -290,6 +294,169 @@ class ConfigProvider {
return overrides[env] || {};
}
/**
 * Build the `app` section of the configuration from environment variables.
 *
 * Every value falls back to a sane default so the app can boot with a
 * minimal environment. LOG_LEVEL is validated against the supported levels
 * instead of being blindly cast, and a non-numeric PORT falls back to 3000.
 */
private extractAppConfig(env: NodeJS.ProcessEnv, environment: Environment) {
  // Only accept known log levels; anything else (typo, unsupported value)
  // degrades to "info" rather than flowing through an unchecked `as` cast.
  const logLevels = ["debug", "info", "warn", "error"] as const;
  type LogLevel = (typeof logLevels)[number];
  const logLevel: LogLevel = logLevels.includes(env.LOG_LEVEL as LogLevel)
    ? (env.LOG_LEVEL as LogLevel)
    : "info";

  // Guard against a non-numeric PORT producing NaN.
  const parsedPort = Number.parseInt(env.PORT || "3000", 10);

  return {
    name: env.APP_NAME || "LiveDash",
    version: env.APP_VERSION || "1.0.0",
    environment,
    baseUrl: env.NEXTAUTH_URL || "http://localhost:3000",
    port: Number.isNaN(parsedPort) ? 3000 : parsedPort,
    logLevel,
    features: {
      // Feature flags are on by default and only disabled by an explicit "false".
      enableMetrics: env.ENABLE_METRICS !== "false",
      enableAnalytics: env.ENABLE_ANALYTICS !== "false",
      enableCaching: env.ENABLE_CACHING !== "false",
      enableCompression: env.ENABLE_COMPRESSION !== "false",
    },
  };
}
/**
 * Build the `database` section of the configuration from environment
 * variables. Connection-pool and retry settings are parsed from their env
 * strings with sensible defaults.
 */
private extractDatabaseConfig(env: NodeJS.ProcessEnv) {
  // Parse an integer env var, using `fallback` when unset or empty.
  const toInt = (value: string | undefined, fallback: string) =>
    Number.parseInt(value || fallback, 10);

  return {
    url: env.DATABASE_URL || "",
    directUrl: env.DATABASE_URL_DIRECT,
    maxConnections: toInt(env.DB_MAX_CONNECTIONS, "10"),
    connectionTimeout: toInt(env.DB_CONNECTION_TIMEOUT, "30000"),
    queryTimeout: toInt(env.DB_QUERY_TIMEOUT, "60000"),
    retryAttempts: toInt(env.DB_RETRY_ATTEMPTS, "3"),
    retryDelay: toInt(env.DB_RETRY_DELAY, "1000"),
  };
}
/**
 * Build the `auth` section of the configuration from environment variables.
 * Credentials login is enabled by default; OAuth providers are opt-in.
 */
private extractAuthConfig(env: NodeJS.ProcessEnv) {
  const providers = {
    // Enabled unless explicitly turned off.
    credentials: env.AUTH_CREDENTIALS_ENABLED !== "false",
    // Disabled unless explicitly turned on.
    github: env.AUTH_GITHUB_ENABLED === "true",
    google: env.AUTH_GOOGLE_ENABLED === "true",
  };

  return {
    secret: env.NEXTAUTH_SECRET || "",
    url: env.NEXTAUTH_URL || "http://localhost:3000",
    sessionMaxAge: Number.parseInt(env.AUTH_SESSION_MAX_AGE || "86400", 10),
    providers,
  };
}
/**
 * Build the `security` section of the configuration from environment
 * variables: CSP, CSRF, rate limiting and audit logging. All four features
 * are on by default and only disabled by an explicit "false".
 */
private extractSecurityConfig(env: NodeJS.ProcessEnv) {
  // Parse an integer env var, using `fallback` when unset or empty.
  const toInt = (value: string | undefined, fallback: string) =>
    Number.parseInt(value || fallback, 10);
  // Features default to enabled; only "false" switches them off.
  const enabledByDefault = (value: string | undefined) => value !== "false";

  const csp = {
    enabled: enabledByDefault(env.CSP_ENABLED),
    reportUri: env.CSP_REPORT_URI,
    reportOnly: env.CSP_REPORT_ONLY === "true",
  };
  const csrf = {
    enabled: enabledByDefault(env.CSRF_ENABLED),
    tokenExpiry: toInt(env.CSRF_TOKEN_EXPIRY, "3600"),
  };
  const rateLimit = {
    enabled: enabledByDefault(env.RATE_LIMIT_ENABLED),
    windowMs: toInt(env.RATE_LIMIT_WINDOW_MS, "900000"),
    maxRequests: toInt(env.RATE_LIMIT_MAX_REQUESTS, "100"),
  };
  const audit = {
    enabled: enabledByDefault(env.AUDIT_ENABLED),
    retentionDays: toInt(env.AUDIT_RETENTION_DAYS, "90"),
    bufferSize: toInt(env.AUDIT_BUFFER_SIZE, "1000"),
  };

  return { csp, csrf, rateLimit, audit };
}
/**
 * Build the `openai` section of the configuration from environment
 * variables, including the batch-API tuning knobs. Mock mode is opt-in
 * (for offline testing); the batch API is enabled unless explicitly off.
 */
private extractOpenAIConfig(env: NodeJS.ProcessEnv) {
  // Parse an integer env var, using `fallback` when unset or empty.
  const toInt = (value: string | undefined, fallback: string) =>
    Number.parseInt(value || fallback, 10);

  const batchConfig = {
    enabled: env.OPENAI_BATCH_ENABLED !== "false",
    maxRequestsPerBatch: toInt(env.OPENAI_BATCH_MAX_REQUESTS, "1000"),
    statusCheckInterval: toInt(env.OPENAI_BATCH_STATUS_INTERVAL, "60000"),
    maxTimeout: toInt(env.OPENAI_BATCH_MAX_TIMEOUT, "86400000"),
  };

  return {
    apiKey: env.OPENAI_API_KEY || "",
    organization: env.OPENAI_ORGANIZATION,
    mockMode: env.OPENAI_MOCK_MODE === "true",
    defaultModel: env.OPENAI_DEFAULT_MODEL || "gpt-3.5-turbo",
    maxTokens: toInt(env.OPENAI_MAX_TOKENS, "1000"),
    temperature: Number.parseFloat(env.OPENAI_TEMPERATURE || "0.1"),
    batchConfig,
  };
}
/**
 * Build the `scheduler` section of the configuration from environment
 * variables. Each sub-scheduler is enabled by default and carries a cron
 * expression for its run interval.
 */
private extractSchedulerConfig(env: NodeJS.ProcessEnv) {
  const csvImport = {
    enabled: env.CSV_IMPORT_SCHEDULER_ENABLED !== "false",
    interval: env.CSV_IMPORT_INTERVAL || "*/5 * * * *",
  };
  const importProcessor = {
    enabled: env.IMPORT_PROCESSOR_ENABLED !== "false",
    interval: env.IMPORT_PROCESSOR_INTERVAL || "*/2 * * * *",
  };
  const sessionProcessor = {
    enabled: env.SESSION_PROCESSOR_ENABLED !== "false",
    interval: env.SESSION_PROCESSOR_INTERVAL || "*/3 * * * *",
    batchSize: Number.parseInt(env.SESSION_PROCESSOR_BATCH_SIZE || "50", 10),
  };
  const batchProcessor = {
    enabled: env.BATCH_PROCESSOR_ENABLED !== "false",
    createInterval: env.BATCH_CREATE_INTERVAL || "*/5 * * * *",
    statusInterval: env.BATCH_STATUS_INTERVAL || "*/2 * * * *",
    resultInterval: env.BATCH_RESULT_INTERVAL || "*/1 * * * *",
  };

  return {
    enabled: env.SCHEDULER_ENABLED !== "false",
    csvImport,
    importProcessor,
    sessionProcessor,
    batchProcessor,
  };
}
/**
 * Build the `email` section of the configuration from environment variables.
 * Email sending is opt-in; SMTP credentials may be undefined when disabled.
 */
private extractEmailConfig(env: NodeJS.ProcessEnv) {
  const smtp = {
    host: env.SMTP_HOST,
    port: Number.parseInt(env.SMTP_PORT || "587", 10),
    secure: env.SMTP_SECURE === "true",
    user: env.SMTP_USER,
    password: env.SMTP_PASSWORD,
  };
  const templates = {
    passwordReset: env.EMAIL_TEMPLATE_PASSWORD_RESET || "password-reset",
    userInvitation: env.EMAIL_TEMPLATE_USER_INVITATION || "user-invitation",
  };

  return {
    enabled: env.EMAIL_ENABLED === "true",
    smtp,
    from: env.EMAIL_FROM || "noreply@livedash.com",
    templates,
  };
}
/**
* Extract configuration from environment variables
*/
@ -298,130 +465,13 @@ class ConfigProvider {
const environment = (env.NODE_ENV as Environment) || "development";
return {
app: {
name: env.APP_NAME || "LiveDash",
version: env.APP_VERSION || "1.0.0",
environment,
baseUrl: env.NEXTAUTH_URL || "http://localhost:3000",
port: Number.parseInt(env.PORT || "3000", 10),
logLevel: (env.LOG_LEVEL as any) || "info",
features: {
enableMetrics: env.ENABLE_METRICS !== "false",
enableAnalytics: env.ENABLE_ANALYTICS !== "false",
enableCaching: env.ENABLE_CACHING !== "false",
enableCompression: env.ENABLE_COMPRESSION !== "false",
},
},
database: {
url: env.DATABASE_URL || "",
directUrl: env.DATABASE_URL_DIRECT,
maxConnections: Number.parseInt(env.DB_MAX_CONNECTIONS || "10", 10),
connectionTimeout: Number.parseInt(
env.DB_CONNECTION_TIMEOUT || "30000",
10
),
queryTimeout: Number.parseInt(env.DB_QUERY_TIMEOUT || "60000", 10),
retryAttempts: Number.parseInt(env.DB_RETRY_ATTEMPTS || "3", 10),
retryDelay: Number.parseInt(env.DB_RETRY_DELAY || "1000", 10),
},
auth: {
secret: env.NEXTAUTH_SECRET || "",
url: env.NEXTAUTH_URL || "http://localhost:3000",
sessionMaxAge: Number.parseInt(env.AUTH_SESSION_MAX_AGE || "86400", 10),
providers: {
credentials: env.AUTH_CREDENTIALS_ENABLED !== "false",
github: env.AUTH_GITHUB_ENABLED === "true",
google: env.AUTH_GOOGLE_ENABLED === "true",
},
},
security: {
csp: {
enabled: env.CSP_ENABLED !== "false",
reportUri: env.CSP_REPORT_URI,
reportOnly: env.CSP_REPORT_ONLY === "true",
},
csrf: {
enabled: env.CSRF_ENABLED !== "false",
tokenExpiry: Number.parseInt(env.CSRF_TOKEN_EXPIRY || "3600", 10),
},
rateLimit: {
enabled: env.RATE_LIMIT_ENABLED !== "false",
windowMs: Number.parseInt(env.RATE_LIMIT_WINDOW_MS || "900000", 10),
maxRequests: Number.parseInt(
env.RATE_LIMIT_MAX_REQUESTS || "100",
10
),
},
audit: {
enabled: env.AUDIT_ENABLED !== "false",
retentionDays: Number.parseInt(env.AUDIT_RETENTION_DAYS || "90", 10),
bufferSize: Number.parseInt(env.AUDIT_BUFFER_SIZE || "1000", 10),
},
},
openai: {
apiKey: env.OPENAI_API_KEY || "",
organization: env.OPENAI_ORGANIZATION,
mockMode: env.OPENAI_MOCK_MODE === "true",
defaultModel: env.OPENAI_DEFAULT_MODEL || "gpt-3.5-turbo",
maxTokens: Number.parseInt(env.OPENAI_MAX_TOKENS || "1000", 10),
temperature: Number.parseFloat(env.OPENAI_TEMPERATURE || "0.1"),
batchConfig: {
enabled: env.OPENAI_BATCH_ENABLED !== "false",
maxRequestsPerBatch: Number.parseInt(
env.OPENAI_BATCH_MAX_REQUESTS || "1000",
10
),
statusCheckInterval: Number.parseInt(
env.OPENAI_BATCH_STATUS_INTERVAL || "60000",
10
),
maxTimeout: Number.parseInt(
env.OPENAI_BATCH_MAX_TIMEOUT || "86400000",
10
),
},
},
scheduler: {
enabled: env.SCHEDULER_ENABLED !== "false",
csvImport: {
enabled: env.CSV_IMPORT_SCHEDULER_ENABLED !== "false",
interval: env.CSV_IMPORT_INTERVAL || "*/5 * * * *",
},
importProcessor: {
enabled: env.IMPORT_PROCESSOR_ENABLED !== "false",
interval: env.IMPORT_PROCESSOR_INTERVAL || "*/2 * * * *",
},
sessionProcessor: {
enabled: env.SESSION_PROCESSOR_ENABLED !== "false",
interval: env.SESSION_PROCESSOR_INTERVAL || "*/3 * * * *",
batchSize: Number.parseInt(
env.SESSION_PROCESSOR_BATCH_SIZE || "50",
10
),
},
batchProcessor: {
enabled: env.BATCH_PROCESSOR_ENABLED !== "false",
createInterval: env.BATCH_CREATE_INTERVAL || "*/5 * * * *",
statusInterval: env.BATCH_STATUS_INTERVAL || "*/2 * * * *",
resultInterval: env.BATCH_RESULT_INTERVAL || "*/1 * * * *",
},
},
email: {
enabled: env.EMAIL_ENABLED === "true",
smtp: {
host: env.SMTP_HOST,
port: Number.parseInt(env.SMTP_PORT || "587", 10),
secure: env.SMTP_SECURE === "true",
user: env.SMTP_USER,
password: env.SMTP_PASSWORD,
},
from: env.EMAIL_FROM || "noreply@livedash.com",
templates: {
passwordReset: env.EMAIL_TEMPLATE_PASSWORD_RESET || "password-reset",
userInvitation:
env.EMAIL_TEMPLATE_USER_INVITATION || "user-invitation",
},
},
app: this.extractAppConfig(env, environment),
database: this.extractDatabaseConfig(env),
auth: this.extractAuthConfig(env),
security: this.extractSecurityConfig(env),
openai: this.extractOpenAIConfig(env),
scheduler: this.extractSchedulerConfig(env),
email: this.extractEmailConfig(env),
};
}

View File

@ -191,7 +191,7 @@ export const DynamicAuditLogsPanel = createDynamicComponent(
// React wrapper for React.lazy with Suspense
export function createLazyComponent<
T extends Record<string, any> = Record<string, any>,
T extends Record<string, unknown> = Record<string, unknown>,
>(
importFunc: () => Promise<{ default: ComponentType<T> }>,
fallback: ComponentType = LoadingSpinner

View File

@ -15,6 +15,26 @@ import {
type MockResponseType,
} from "./openai-responses";
/** Chat-completion request parameters accepted by the mock server. */
interface ChatCompletionParams {
model: string;
messages: Array<{ role: string; content: string }>;
temperature?: number;
max_tokens?: number;
// Permit additional OpenAI request options without enumerating them all.
[key: string]: unknown;
}
/** Parameters for creating a batch job against the mock batches endpoint. */
interface BatchCreateParams {
input_file_id: string;
endpoint: string;
completion_window: string;
metadata?: Record<string, unknown>;
}
/** Parameters for the mock file-upload endpoint. */
interface FileCreateParams {
file: string; // File content as string for mock purposes
purpose: string;
}
interface MockOpenAIConfig {
enabled: boolean;
baseDelay: number; // Base delay in ms to simulate API latency
@ -115,12 +135,9 @@ class OpenAIMockServer {
/**
* Mock chat completions endpoint
*/
async mockChatCompletion(request: {
model: string;
messages: Array<{ role: string; content: string }>;
temperature?: number;
max_tokens?: number;
}): Promise<MockChatCompletion> {
async mockChatCompletion(
request: ChatCompletionParams
): Promise<MockChatCompletion> {
this.requestCount++;
await this.simulateDelay();
@ -172,12 +189,9 @@ class OpenAIMockServer {
/**
* Mock batch creation endpoint
*/
async mockCreateBatch(request: {
input_file_id: string;
endpoint: string;
completion_window: string;
metadata?: Record<string, string>;
}): Promise<MockBatchResponse> {
async mockCreateBatch(
request: BatchCreateParams
): Promise<MockBatchResponse> {
await this.simulateDelay();
if (this.shouldSimulateError()) {
@ -214,10 +228,7 @@ class OpenAIMockServer {
/**
* Mock file upload endpoint
*/
async mockUploadFile(request: {
file: string; // File content
purpose: string;
}): Promise<{
async mockUploadFile(request: FileCreateParams): Promise<{
id: string;
object: string;
purpose: string;
@ -364,23 +375,42 @@ export const openAIMock = new OpenAIMockServer();
/**
* Drop-in replacement for OpenAI client that uses mocks when enabled
*/
export class MockOpenAIClient {
private realClient: unknown;
/**
 * Minimal structural type for the real OpenAI client, covering only the
 * surface that MockOpenAIClient delegates to: chat completions, batches,
 * and files. Return types reuse the mock shapes so the mocked and real
 * code paths are interchangeable to callers.
 */
interface OpenAIClient {
chat: {
completions: {
create: (params: ChatCompletionParams) => Promise<MockChatCompletion>;
};
};
batches: {
create: (params: BatchCreateParams) => Promise<MockBatchResponse>;
retrieve: (batchId: string) => Promise<MockBatchResponse>;
};
files: {
create: (params: FileCreateParams) => Promise<{
id: string;
object: string;
purpose: string;
filename: string;
}>;
content: (fileId: string) => Promise<string>;
};
}
constructor(realClient: unknown) {
export class MockOpenAIClient {
private realClient: OpenAIClient;
constructor(realClient: OpenAIClient) {
this.realClient = realClient;
}
get chat() {
return {
completions: {
create: async (params: any) => {
create: async (params: ChatCompletionParams) => {
if (openAIMock.isEnabled()) {
return openAIMock.mockChatCompletion(params as any);
return openAIMock.mockChatCompletion(params);
}
return (this.realClient as any).chat.completions.create(
params as any
);
return this.realClient.chat.completions.create(params);
},
},
};
@ -388,34 +418,34 @@ export class MockOpenAIClient {
get batches() {
return {
create: async (params: any) => {
create: async (params: BatchCreateParams) => {
if (openAIMock.isEnabled()) {
return openAIMock.mockCreateBatch(params as any);
return openAIMock.mockCreateBatch(params);
}
return (this.realClient as any).batches.create(params as any);
return this.realClient.batches.create(params);
},
retrieve: async (batchId: string) => {
if (openAIMock.isEnabled()) {
return openAIMock.mockGetBatch(batchId);
}
return (this.realClient as any).batches.retrieve(batchId);
return this.realClient.batches.retrieve(batchId);
},
};
}
get files() {
return {
create: async (params: any) => {
create: async (params: FileCreateParams) => {
if (openAIMock.isEnabled()) {
return openAIMock.mockUploadFile(params);
}
return (this.realClient as any).files.create(params);
return this.realClient.files.create(params);
},
content: async (fileId: string) => {
if (openAIMock.isEnabled()) {
return openAIMock.mockGetFileContent(fileId);
}
return (this.realClient as any).files.content(fileId);
return this.realClient.files.content(fileId);
},
};
}

View File

@ -181,7 +181,8 @@ class PerformanceMonitor {
// Placeholder for analytics integration
// You could send this to Google Analytics, Vercel Analytics, etc.
if (typeof window !== "undefined" && "gtag" in window) {
(window as any).gtag("event", "core_web_vital", {
const gtag = (window as { gtag?: (...args: unknown[]) => void }).gtag;
gtag?.("event", "core_web_vital", {
name: metricName,
value: Math.round(value),
metric_rating: this.getRating(metricName, value),

View File

@ -169,7 +169,7 @@ export class PerformanceCache<K extends {} = string, V = unknown> {
/**
* Memoize a function with caching
*/
memoize<Args extends any[], Return extends V>(
memoize<Args extends unknown[], Return extends V>(
fn: (...args: Args) => Promise<Return> | Return,
keyGenerator?: (...args: Args) => K,
ttl?: number
@ -421,7 +421,7 @@ export class CacheUtils {
/**
* Cache the result of an async function
*/
static cached<T extends any[], R>(
static cached<T extends unknown[], R>(
cacheName: string,
fn: (...args: T) => Promise<R>,
options: CacheOptions & {

View File

@ -155,34 +155,36 @@ export class RequestDeduplicator {
}> = [];
// Create the main promise
const promise = new Promise<T>(async (resolve, reject) => {
const promise = new Promise<T>((resolve, reject) => {
resolvers.push({ resolve, reject });
try {
const result = await fn();
// Execute the async function
fn()
.then((result) => {
// Cache the result
if (options.ttl && options.ttl > 0) {
this.results.set(key, {
value: result,
timestamp: Date.now(),
ttl: options.ttl,
});
}
// Cache the result
if (options.ttl && options.ttl > 0) {
this.results.set(key, {
value: result,
timestamp: Date.now(),
ttl: options.ttl,
});
}
// Resolve all waiting promises
resolvers.forEach(({ resolve: res }) => res(result));
})
.catch((error) => {
this.stats.errors++;
// Resolve all waiting promises
resolvers.forEach(({ resolve: res }) => res(result));
} catch (error) {
this.stats.errors++;
// Reject all waiting promises
const errorToReject =
error instanceof Error ? error : new Error(String(error));
resolvers.forEach(({ reject: rej }) => rej(errorToReject));
} finally {
// Clean up pending request
this.pendingRequests.delete(key);
}
// Reject all waiting promises
const errorToReject =
error instanceof Error ? error : new Error(String(error));
resolvers.forEach(({ reject: rej }) => rej(errorToReject));
})
.finally(() => {
// Clean up pending request
this.pendingRequests.delete(key);
});
});
// Set up timeout if specified

View File

@ -167,7 +167,11 @@ function startMonitoringIfEnabled(enabled?: boolean): void {
/**
* Helper function to record request metrics if enabled
*/
function recordRequestIfEnabled(timer: ReturnType<typeof PerformanceUtils.createTimer>, isError: boolean, enabled?: boolean): void {
function recordRequestIfEnabled(
timer: ReturnType<typeof PerformanceUtils.createTimer>,
isError: boolean,
enabled?: boolean
): void {
if (enabled) {
performanceMonitor.recordRequest(timer.end(), isError);
}
@ -185,7 +189,7 @@ async function executeRequestWithOptimizations(
if (opts.cache?.enabled || opts.deduplication?.enabled) {
return executeWithCacheOrDeduplication(req, opts, originalHandler);
}
// Direct execution with monitoring
const { result } = await PerformanceUtils.measureAsync(routeName, () =>
originalHandler(req)
@ -216,18 +220,16 @@ async function executeWithCacheOrDeduplication(
opts.cache.ttl
);
}
// Deduplication only
const deduplicator =
deduplicators[
opts.deduplication?.deduplicatorName as keyof typeof deduplicators
] || deduplicators.api;
return deduplicator.execute(
cacheKey,
() => originalHandler(req),
{ ttl: opts.deduplication?.ttl }
);
return deduplicator.execute(cacheKey, () => originalHandler(req), {
ttl: opts.deduplication?.ttl,
});
}
/**
@ -247,7 +249,12 @@ export function enhanceAPIRoute(
try {
startMonitoringIfEnabled(opts.monitoring?.enabled);
const response = await executeRequestWithOptimizations(req, opts, routeName, originalHandler);
const response = await executeRequestWithOptimizations(
req,
opts,
routeName,
originalHandler
);
recordRequestIfEnabled(timer, false, opts.monitoring?.recordRequests);
return response;
} catch (error) {
@ -263,8 +270,10 @@ export function enhanceAPIRoute(
export function PerformanceEnhanced(
options: PerformanceIntegrationOptions = {}
) {
return <T extends new (...args: any[]) => {}>(constructor: T) =>
class extends constructor {
// biome-ignore lint/suspicious/noExplicitAny: Required for mixin class pattern - TypeScript requires any[] for constructor parameters in mixins
return <T extends new (...args: any[]) => {}>(Constructor: T) =>
class extends Constructor {
// biome-ignore lint/suspicious/noExplicitAny: Required for mixin class pattern - TypeScript requires any[] for constructor parameters in mixins
constructor(...args: any[]) {
super(...args);
@ -279,7 +288,7 @@ export function PerformanceEnhanced(
if (typeof originalMethod === "function") {
(this as Record<string, unknown>)[methodName] =
enhanceServiceMethod(
`${constructor.name}.${methodName}`,
`${Constructor.name}.${methodName}`,
originalMethod.bind(this),
options
);

View File

@ -777,9 +777,8 @@ export class PerformanceUtils {
}
descriptor.value = async function (...args: unknown[]) {
const { result } = await PerformanceUtils.measureAsync(
metricName,
() => originalMethod.apply(this, args)
const { result } = await PerformanceUtils.measureAsync(metricName, () =>
originalMethod.apply(this, args)
);
return result;
};