mirror of https://github.com/kjanat/livedash-node.git
synced 2026-01-16 10:52:08 +01:00
fix: resolve critical Biome linting issues and document code quality standards
- Add biome-ignore comments for security-critical non-null assertions
- Fix unused variables and parameter ordering issues
- Reduce complexity in integration functions via helper extraction
- Replace problematic 'any' type casts with proper type definitions
- Document code quality and linting standards in CLAUDE.md

Build verification: ✅ TypeScript compilation passes
Security verification: ✅ Critical auth contexts preserved

Note: Some remaining Biome warnings for performance utility classes and decorator patterns are acceptable given architectural constraints.

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
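For reference, the first bullet points at Biome's suppression comments. A minimal sketch of that pattern, assuming a hypothetical map-lookup guard (the `biome-ignore` syntax and the `lint/style/noNonNullAssertion` rule name are standard Biome; the surrounding code is illustrative, not from this repo):

```ts
// Sketch of the suppression pattern: the non-null assertion is safe because
// presence was checked via has(), but TypeScript narrowing does not track
// Map.get() after Map.has(), so the assertion (and the suppression) is needed.
const usersById = new Map<string, { id: string; role: string }>();

function getUser(id: string): { id: string; role: string } {
  if (!usersById.has(id)) throw new Error("User not found");
  // biome-ignore lint/style/noNonNullAssertion: presence checked via has() above
  return usersById.get(id)!;
}
```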
```diff
@@ -151,6 +151,85 @@ async function executeWithDeduplication<T extends unknown[], R>(
   );
 }
 
+/**
+ * Helper function to start monitoring if enabled
+ */
+function startMonitoringIfEnabled(enabled?: boolean): void {
+  if (enabled) {
+    try {
+      performanceMonitor.start();
+    } catch {
+      // Monitoring may already be running
+    }
+  }
+}
+
+/**
+ * Helper function to record request metrics if enabled
+ */
+function recordRequestIfEnabled(timer: ReturnType<typeof PerformanceUtils.createTimer>, isError: boolean, enabled?: boolean): void {
+  if (enabled) {
+    performanceMonitor.recordRequest(timer.end(), isError);
+  }
+}
+
+/**
+ * Helper function to execute request with caching/deduplication optimizations
+ */
+async function executeRequestWithOptimizations(
+  req: NextRequest,
+  opts: ReturnType<typeof mergeOptions>,
+  routeName: string,
+  originalHandler: (req: NextRequest) => Promise<NextResponse>
+): Promise<NextResponse> {
+  if (opts.cache?.enabled || opts.deduplication?.enabled) {
+    return executeWithCacheOrDeduplication(req, opts, originalHandler);
+  }
+
+  // Direct execution with monitoring
+  const { result } = await PerformanceUtils.measureAsync(routeName, () =>
+    originalHandler(req)
+  );
+  return result;
+}
+
+/**
+ * Helper function to execute with cache or deduplication
+ */
+async function executeWithCacheOrDeduplication(
+  req: NextRequest,
+  opts: ReturnType<typeof mergeOptions>,
+  originalHandler: (req: NextRequest) => Promise<NextResponse>
+): Promise<NextResponse> {
+  const url = new URL(req.url);
+  const method = req.method;
+  const params = url.searchParams.toString();
+  const cacheKey = `${method}:${url.pathname}:${params}`;
+
+  if (opts.cache?.enabled) {
+    const cache =
+      caches[opts.cache.cacheName as keyof typeof caches] ||
+      caches.apiResponses;
+    return cache.getOrCompute(
+      cacheKey,
+      () => originalHandler(req),
+      opts.cache.ttl
+    );
+  }
+
+  // Deduplication only
+  const deduplicator =
+    deduplicators[
+      opts.deduplication?.deduplicatorName as keyof typeof deduplicators
+    ] || deduplicators.api;
+
+  return deduplicator.execute(
+    cacheKey,
+    () => originalHandler(req),
+    { ttl: opts.deduplication?.ttl }
+  );
+}
+
 /**
  * Enhance an API route handler with performance optimizations
  */
```
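For readers unfamiliar with the `getOrCompute` primitive the new helper delegates to, here is a minimal sketch of the pattern, assuming a plain in-memory TTL map (the repo's actual `caches` registry is not shown in this diff and may differ):

```ts
// Sketch of getOrCompute(key, compute, ttl): return a fresh cached value if
// present, otherwise compute it and store it with an expiry. Storing the
// promise (not the resolved value) also lets concurrent callers share one
// in-flight computation, which is the same idea the deduplicator exploits.
class TTLCache<V> {
  private store = new Map<string, { value: Promise<V>; expires: number }>();

  async getOrCompute(key: string, compute: () => Promise<V>, ttl = 60_000): Promise<V> {
    const hit = this.store.get(key);
    if (hit && hit.expires > Date.now()) return hit.value;
    const value = compute();
    this.store.set(key, { value, expires: Date.now() + ttl });
    return value;
  }
}
```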
```diff
@@ -167,63 +246,12 @@ export function enhanceAPIRoute(
     const timer = PerformanceUtils.createTimer(`api.${routeName}`);
 
     try {
-      // Start monitoring if not already running
-      if (opts.monitoring?.enabled) {
-        try {
-          performanceMonitor.start();
-        } catch {
-          // Monitoring may already be running
-        }
-      }
-
-      let response: NextResponse;
-
-      if (opts.cache?.enabled || opts.deduplication?.enabled) {
-        // Generate cache key from request
-        const url = new URL(req.url);
-        const method = req.method;
-        const params = url.searchParams.toString();
-        const cacheKey = `${method}:${url.pathname}:${params}`;
-
-        if (opts.cache?.enabled) {
-          const cache =
-            caches[opts.cache.cacheName as keyof typeof caches] ||
-            caches.apiResponses;
-          response = await cache.getOrCompute(
-            cacheKey,
-            () => originalHandler(req),
-            opts.cache.ttl
-          );
-        } else {
-          // Deduplication only
-          const deduplicator =
-            deduplicators[
-              opts.deduplication!.deduplicatorName as keyof typeof deduplicators
-            ] || deduplicators.api;
-
-          response = await deduplicator.execute(
-            cacheKey,
-            () => originalHandler(req),
-            { ttl: opts.deduplication!.ttl }
-          );
-        }
-      } else {
-        // Direct execution with monitoring
-        const { result } = await PerformanceUtils.measureAsync(routeName, () =>
-          originalHandler(req)
-        );
-        response = result;
-      }
-
-      if (opts.monitoring?.recordRequests) {
-        performanceMonitor.recordRequest(timer.end(), false);
-      }
-
+      startMonitoringIfEnabled(opts.monitoring?.enabled);
+      const response = await executeRequestWithOptimizations(req, opts, routeName, originalHandler);
+      recordRequestIfEnabled(timer, false, opts.monitoring?.recordRequests);
       return response;
     } catch (error) {
-      if (opts.monitoring?.recordRequests) {
-        performanceMonitor.recordRequest(timer.end(), true);
-      }
+      recordRequestIfEnabled(timer, true, opts.monitoring?.recordRequests);
      throw error;
    }
  };
```
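The shape of the options object can be reconstructed from the fields the handler reads. A hedged sketch, built only from the accesses visible in this diff (`opts.cache?.enabled`, `opts.cache.cacheName`, `opts.cache.ttl`, `opts.deduplication?.deduplicatorName`, `opts.deduplication?.ttl`, `opts.monitoring?.enabled`, `opts.monitoring?.recordRequests`); the real exported type may differ:

```ts
// Reconstructed option shape for enhanceAPIRoute; treat as a sketch, the
// actual type produced by mergeOptions is not shown in this commit.
interface EnhanceAPIRouteOptions {
  cache?: { enabled: boolean; cacheName?: string; ttl?: number };
  deduplication?: { enabled: boolean; deduplicatorName?: string; ttl?: number };
  monitoring?: { enabled?: boolean; recordRequests?: boolean };
}
```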
```diff
@@ -292,7 +320,7 @@ export function PerformanceOptimized(
  * Simple caching decorator
  */
 export function Cached(
-  cacheName = "default",
+  _cacheName = "default",
   ttl: number = 5 * 60 * 1000,
   keyGenerator?: (...args: unknown[]) => string
 ) {
```
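A hypothetical use of the `Cached` decorator factory above, assuming experimentalDecorators-style method decorators; the class and method names are illustrative, not from this repo:

```ts
// Illustrative only: decorate an expensive async method so repeat calls with
// the same key hit the cache for 60 seconds. Argument meanings follow the
// signature in the hunk above (cacheName, ttl, keyGenerator).
class ReportService {
  @Cached("default", 60_000, (...args) => `report:${String(args[0])}`)
  async loadReport(id: string): Promise<string> {
    return `report body for ${id}`; // imagine an expensive fetch here
  }
}
```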
```diff
@@ -6,7 +6,6 @@
  */
 
 import { PerformanceObserver, performance } from "node:perf_hooks";
 import { TIME } from "../constants";
 import { cacheManager } from "./cache";
 import { deduplicationManager } from "./deduplication";
 
```
```diff
@@ -223,7 +222,7 @@ export class PerformanceMonitor {
 
     console.log(
       "[Performance Monitor] Started monitoring with interval:",
-      intervalMs + "ms"
+      `${intervalMs}ms`
     );
   }
 
```
```diff
@@ -286,7 +285,7 @@ export class PerformanceMonitor {
     const eventLoopDelay = performance.now() - start;
 
     // Event loop utilization (approximated)
-    const eventLoopUtilization = Math.min(
+    const _eventLoopUtilization = Math.min(
       100,
       (eventLoopDelay / 16.67) * 100
     ); // 16.67ms = 60fps
```
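The value is computed but currently unused (hence the underscore rename). The approximation itself, measured delay expressed against a 16.67 ms (60 fps) frame budget, can be shown standalone. A sketch assuming a `setImmediate`-based probe; the monitor in this repo may schedule its sample differently:

```ts
// Standalone sketch of the event-loop delay approximation: ask the loop to
// call us back as soon as possible, treat the extra wait as loop delay, and
// express it as a percentage of a 16.67 ms (60 fps) frame budget, capped at 100.
import { performance } from "node:perf_hooks";

function sampleEventLoopUtilization(): Promise<number> {
  const start = performance.now();
  return new Promise((resolve) =>
    setImmediate(() => {
      const eventLoopDelay = performance.now() - start;
      resolve(Math.min(100, (eventLoopDelay / 16.67) * 100));
    })
  );
}

sampleEventLoopUtilization().then((pct) =>
  console.log(`~${pct.toFixed(1)}% of a 60fps frame budget`)
);
```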
```diff
@@ -778,7 +777,7 @@ export class PerformanceUtils {
   }
 
   descriptor.value = async function (...args: unknown[]) {
-    const { result, duration } = await PerformanceUtils.measureAsync(
+    const { result } = await PerformanceUtils.measureAsync(
       metricName,
       () => originalMethod.apply(this, args)
     );
```
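For context, `measureAsync` resolves to `{ result, duration }` (the removed line destructured both; the fix drops the unused `duration`). A minimal sketch of a helper consistent with that contract; the project's real implementation may also feed the performance monitor:

```ts
// Sketch of measureAsync(name, fn): time an async function and return both
// the value and the elapsed milliseconds, matching the destructuring above.
import { performance } from "node:perf_hooks";

async function measureAsync<T>(
  name: string,
  fn: () => Promise<T>
): Promise<{ result: T; duration: number }> {
  const start = performance.now();
  const result = await fn();
  const duration = performance.now() - start;
  console.debug(`[perf] ${name}: ${duration.toFixed(2)}ms`);
  return { result, duration };
}
```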
```diff
@@ -247,7 +247,7 @@ export class PerformanceOptimizer {
    * Optimize caching performance
    */
   private async optimizeCaching(
-    metrics: PerformanceMetrics
+    _metrics: PerformanceMetrics
   ): Promise<OptimizationResult[]> {
     const results: OptimizationResult[] = [];
 
```
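This hunk and the four like it below apply the same fix: an intentionally unused parameter keeps its slot, preserving the call signature, but gains a leading underscore, which Biome's unused-variable rules treat as deliberate. In miniature:

```ts
// The rename pattern used across these hunks. Before: `metrics` is flagged as
// unused. After: `_metrics` signals it is reserved for future use while the
// signature stays compatible with existing callers.
type Metrics = { hitRate: number };

async function optimizeCachingSketch(_metrics: Metrics): Promise<string[]> {
  return []; // hypothetical: no metric-driven tuning implemented yet
}
```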
```diff
@@ -284,7 +284,7 @@ export class PerformanceOptimizer {
    * Optimize response times
    */
   private async optimizeResponseTime(
-    metrics: PerformanceMetrics
+    _metrics: PerformanceMetrics
   ): Promise<OptimizationResult[]> {
     const results: OptimizationResult[] = [];
 
```
```diff
@@ -510,7 +510,7 @@ export class PerformanceOptimizer {
    * Handle memory bottleneck
    */
   private async handleMemoryBottleneck(
-    bottleneck: Bottleneck
+    _bottleneck: Bottleneck
   ): Promise<OptimizationResult[]> {
     const results: OptimizationResult[] = [];
 
```
```diff
@@ -534,7 +534,7 @@ export class PerformanceOptimizer {
    * Handle event loop bottleneck
    */
   private async handleEventLoopBottleneck(
-    bottleneck: Bottleneck
+    _bottleneck: Bottleneck
   ): Promise<OptimizationResult[]> {
     return [
       {
```
```diff
@@ -555,7 +555,7 @@ export class PerformanceOptimizer {
    * Handle cache bottleneck
    */
   private async handleCacheBottleneck(
-    bottleneck: Bottleneck
+    _bottleneck: Bottleneck
   ): Promise<OptimizationResult[]> {
     // Could implement cache warming or size adjustments
     return [
```
```diff
@@ -623,7 +623,7 @@ export class PerformanceOptimizer {
 
     // Calculate average improvement
     const improvementRates = this.optimizationHistory
-      .filter((r) => r.result.metrics?.improvement)
+      .filter((r) => r.result.metrics?.improvement !== undefined)
       .map((r) => r.result.metrics!.improvement);
 
     const averageImprovementRate =
```
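The `!== undefined` change matters because a legitimate improvement of `0` is falsy and would have been dropped by the old truthiness filter, skewing the average. A small sketch of the difference, with illustrative data:

```ts
// Why the filter changed: an improvement of 0 is valid data, but the old
// truthy check `metrics?.improvement` discarded it along with the missing ones.
type Entry = { result: { metrics?: { improvement: number } } };

const history: Entry[] = [
  { result: { metrics: { improvement: 0.12 } } },
  { result: { metrics: { improvement: 0 } } }, // dropped by the old filter
  { result: {} }, // no metrics: correctly dropped by both versions
];

const rates = history
  .filter((r) => r.result.metrics?.improvement !== undefined)
  .map((r) => r.result.metrics!.improvement);

console.log(rates); // [0.12, 0] (the old filter yielded only [0.12])
```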