mirror of https://github.com/kjanat/livedash-node.git
synced 2026-01-16 11:32:13 +01:00

Compare commits
30 Commits
71c8aff125 ... development
| SHA1 |
|---|
| a002d5ef76 |
| c4cfe2f389 |
| 5b22c0f1f8 |
| 1be9ce9dd9 |
| a6632d6dfc |
| 043aa03534 |
| 7e59567f73 |
| 9238c9a6af |
| 8ffd5a7a2c |
| 2dfc49f840 |
| 185bb6da58 |
| 6f9ac219c2 |
| 601e2e4026 |
| 9a3741cd01 |
| f3f63943a8 |
| 49a75f5ede |
| 5c1ced5900 |
| 50b230aa9b |
| 1dd618b666 |
| d7ac0ba208 |
| ab2c75b736 |
| 8c43a35632 |
| 8f3c1e0f7c |
| 0e5ac69d45 |
| f964d6a078 |
| 944431fea3 |
| 1afe15df85 |
| 9e095e1a43 |
| a9e4145001 |
| 3196dabdf2 |
1  .clinerules/pnpm-not-npm.md  Normal file

@@ -0,0 +1 @@
Use pnpm to manage this project, not npm!
@@ -1,9 +0,0 @@
# Development environment settings
# This file ensures NextAuth always has necessary environment variables in development

# NextAuth.js configuration
NEXTAUTH_URL=http://192.168.1.2:3000
NEXTAUTH_SECRET=this_is_a_fixed_secret_for_development_only
NODE_ENV=development

# Database connection - already configured in your prisma/schema.prisma
26  .env.example  Normal file

@@ -0,0 +1,26 @@
# Development environment settings
# This file ensures NextAuth always has necessary environment variables in development

# NextAuth.js configuration
NEXTAUTH_URL="http://localhost:3000"
NEXTAUTH_SECRET="this_is_a_fixed_secret_for_development_only"
NODE_ENV="development"

# OpenAI API key for session processing
# Add your API key here: OPENAI_API_KEY=sk-...
OPENAI_API_KEY="your_openai_api_key_here"

# Database connection - already configured in your prisma/schema.prisma

# Scheduler Configuration
SCHEDULER_ENABLED="false" # Enable/disable all schedulers (false for dev, true for production)
CSV_IMPORT_INTERVAL="*/15 * * * *" # Cron expression for CSV imports (every 15 minutes)
IMPORT_PROCESSING_INTERVAL="*/5 * * * *" # Cron expression for processing imports to sessions (every 5 minutes)
IMPORT_PROCESSING_BATCH_SIZE="50" # Number of imports to process at once
SESSION_PROCESSING_INTERVAL="0 * * * *" # Cron expression for AI session processing (every hour)
SESSION_PROCESSING_BATCH_SIZE="0" # 0 = unlimited sessions, >0 = specific limit
SESSION_PROCESSING_CONCURRENCY="5" # How many sessions to process in parallel

# Postgres Database Configuration
DATABASE_URL_TEST="postgresql://"
DATABASE_URL="postgresql://"
29  .env.local.example  Normal file

@@ -0,0 +1,29 @@
# Copy this file to .env.local and configure as needed

# NextAuth.js configuration
NEXTAUTH_URL="http://localhost:3000"
NEXTAUTH_SECRET="your_secret_key_here"
NODE_ENV="development"

# OpenAI API key for session processing
OPENAI_API_KEY="your_openai_api_key_here"

# Scheduler Configuration
SCHEDULER_ENABLED="true" # Set to false to disable all schedulers during development
CSV_IMPORT_INTERVAL="*/15 * * * *" # Every 15 minutes (cron format)
IMPORT_PROCESSING_INTERVAL="*/5 * * * *" # Every 5 minutes (cron format) - converts imports to sessions
IMPORT_PROCESSING_BATCH_SIZE="50" # Number of imports to process at once
SESSION_PROCESSING_INTERVAL="0 * * * *" # Every hour (cron format) - AI processing
SESSION_PROCESSING_BATCH_SIZE="0" # 0 = process all sessions, >0 = limit number
SESSION_PROCESSING_CONCURRENCY="5" # Number of sessions to process in parallel

# Postgres Database Configuration
DATABASE_URL_TEST="postgresql://"
DATABASE_URL="postgresql://"

# Example configurations:
# - For development (no schedulers): SCHEDULER_ENABLED=false
# - For testing (every 5 minutes): CSV_IMPORT_INTERVAL=*/5 * * * *
# - For faster import processing: IMPORT_PROCESSING_INTERVAL=*/2 * * * *
# - For limited processing: SESSION_PROCESSING_BATCH_SIZE=10
# - For high concurrency: SESSION_PROCESSING_CONCURRENCY=10
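As a point of reference, here is a minimal sketch of how scheduler variables like these could be consumed — assuming the node-cron package and hypothetical placeholder job functions; the repository's own scheduler (referenced elsewhere in this diff as lib/processingScheduler and lib/importProcessor) may be implemented differently.

```typescript
// Illustrative sketch only — not the project's actual scheduler implementation.
// Assumes the node-cron package; runCsvImport/runImportProcessing are hypothetical placeholders.
import cron from "node-cron";

const schedulerEnabled = process.env.SCHEDULER_ENABLED === "true";
const csvImportInterval = process.env.CSV_IMPORT_INTERVAL ?? "*/15 * * * *";
const importProcessingInterval = process.env.IMPORT_PROCESSING_INTERVAL ?? "*/5 * * * *";
const importBatchSize = Number(process.env.IMPORT_PROCESSING_BATCH_SIZE ?? "50");

async function runCsvImport(): Promise<void> {
  // Placeholder: fetch and queue the latest CSV exports.
}

async function runImportProcessing(batchSize: number): Promise<void> {
  // Placeholder: convert up to `batchSize` queued imports into sessions.
}

if (schedulerEnabled) {
  // CSV imports, every 15 minutes by default.
  cron.schedule(csvImportInterval, () => {
    void runCsvImport();
  });

  // Import-to-session processing, every 5 minutes by default.
  cron.schedule(importProcessingInterval, () => {
    void runImportProcessing(importBatchSize);
  });
}
```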
4  .gitignore  vendored

@@ -261,3 +261,7 @@ Thumbs.db
 /playwright-report/
 /blob-report/
 /playwright/.cache/
+
+# OpenAI API request samples
+sample-openai-request.json
+admin-user.txt
96  TODO.md

@@ -1,96 +0,0 @@
# TODO.md

## Dashboard Integration

- [ ] **Resolve `GeographicMap.tsx` and `ResponseTimeDistribution.tsx` data simulation**
  - Investigate integrating real data sources with server-side analytics
  - Replace simulated data mentioned in `docs/dashboard-components.md`

## Component Specific

- [ ] **Implement robust emailing of temporary passwords**
  - File: `pages/api/dashboard/users.ts`
  - Set up proper email service integration

- [x] **Session page improvements** ✅
  - File: `app/dashboard/sessions/page.tsx`
  - Implemented pagination, advanced filtering, and sorting

## File Cleanup

- [x] **Remove backup files** ✅
  - Reviewed and removed `.bak` and `.new` files after integration
  - Cleaned up `GeographicMap.tsx.bak`, `SessionDetails.tsx.bak`, `SessionDetails.tsx.new`

## Database Schema Improvements

- [ ] **Update EndTime field**
  - Make `endTime` field nullable in Prisma schema to match TypeScript interfaces

- [ ] **Add database indices**
  - Add appropriate indices to improve query performance
  - Focus on dashboard metrics and session listing queries

- [ ] **Implement production email service**
  - Replace console logging in `lib/sendEmail.ts`
  - Consider providers: Nodemailer, SendGrid, AWS SES

## General Enhancements & Features

- [ ] **Real-time updates**
  - Implement for dashboard and session list
  - Consider WebSockets or Server-Sent Events

- [ ] **Data export functionality**
  - Allow users (especially admins) to export session data
  - Support CSV format initially

- [ ] **Customizable dashboard**
  - Allow users to customize dashboard view
  - Let users choose which metrics/charts are most important

## Testing & Quality Assurance

- [ ] **Comprehensive testing suite**
  - [ ] Unit tests for utility functions and API logic
  - [ ] Integration tests for API endpoints with database
  - [ ] End-to-end tests for user flows (Playwright or Cypress)

- [ ] **Error monitoring and logging**
  - Integrate robust error monitoring service (Sentry)
  - Enhance server-side logging

- [ ] **Accessibility improvements**
  - Review application against WCAG guidelines
  - Improve keyboard navigation and screen reader compatibility
  - Check color contrast ratios

## Security Enhancements

- [x] **Password reset functionality** ✅
  - Implemented secure password reset mechanism
  - Files: `app/forgot-password/page.tsx`, `app/reset-password/page.tsx`, `pages/api/forgot-password.ts`, `pages/api/reset-password.ts`

- [ ] **Two-Factor Authentication (2FA)**
  - Consider adding 2FA, especially for admin accounts

- [ ] **Input validation and sanitization**
  - Review all user inputs (API request bodies, query parameters)
  - Ensure proper validation and sanitization

## Code Quality & Development

- [ ] **Code review process**
  - Enforce code reviews for all changes

- [ ] **Environment configuration**
  - Ensure secure management of environment-specific configurations

- [ ] **Dependency management**
  - Periodically review dependencies for vulnerabilities
  - Keep dependencies updated

- [ ] **Documentation updates**
  - [ ] Ensure `docs/dashboard-components.md` reflects actual implementations
  - [ ] Verify "Dashboard Enhancements" are consistently applied
  - [ ] Update documentation for improved layout and visual hierarchies
136  app/api/admin/refresh-sessions/route.ts  Normal file

@@ -0,0 +1,136 @@
import { NextRequest, NextResponse } from "next/server";
import { fetchAndParseCsv } from "../../../../lib/csvFetcher";
import { processQueuedImports } from "../../../../lib/importProcessor";
import { prisma } from "../../../../lib/prisma";

export async function POST(request: NextRequest) {
  try {
    const body = await request.json();
    let { companyId } = body;

    if (!companyId) {
      // Try to get user from prisma based on session cookie
      try {
        const session = await prisma.session.findFirst({
          orderBy: { createdAt: "desc" },
          where: {
            /* Add session check criteria here */
          },
        });

        if (session) {
          companyId = session.companyId;
        }
      } catch (error) {
        // Log error for server-side debugging
        const errorMessage =
          error instanceof Error ? error.message : String(error);
        // Use a server-side logging approach instead of console
        process.stderr.write(`Error fetching session: ${errorMessage}\n`);
      }
    }

    if (!companyId) {
      return NextResponse.json(
        { error: "Company ID is required" },
        { status: 400 }
      );
    }

    const company = await prisma.company.findUnique({ where: { id: companyId } });
    if (!company) {
      return NextResponse.json(
        { error: "Company not found" },
        { status: 404 }
      );
    }

    const rawSessionData = await fetchAndParseCsv(
      company.csvUrl,
      company.csvUsername as string | undefined,
      company.csvPassword as string | undefined
    );

    let importedCount = 0;

    // Create SessionImport records for new data
    for (const rawSession of rawSessionData) {
      try {
        // Use upsert to handle duplicates gracefully
        await prisma.sessionImport.upsert({
          where: {
            companyId_externalSessionId: {
              companyId: company.id,
              externalSessionId: rawSession.externalSessionId,
            },
          },
          update: {
            // Update existing record with latest data
            startTimeRaw: rawSession.startTimeRaw,
            endTimeRaw: rawSession.endTimeRaw,
            ipAddress: rawSession.ipAddress,
            countryCode: rawSession.countryCode,
            language: rawSession.language,
            messagesSent: rawSession.messagesSent,
            sentimentRaw: rawSession.sentimentRaw,
            escalatedRaw: rawSession.escalatedRaw,
            forwardedHrRaw: rawSession.forwardedHrRaw,
            fullTranscriptUrl: rawSession.fullTranscriptUrl,
            avgResponseTimeSeconds: rawSession.avgResponseTimeSeconds,
            tokens: rawSession.tokens,
            tokensEur: rawSession.tokensEur,
            category: rawSession.category,
            initialMessage: rawSession.initialMessage,
            // Status tracking now handled by ProcessingStatusManager
          },
          create: {
            companyId: company.id,
            externalSessionId: rawSession.externalSessionId,
            startTimeRaw: rawSession.startTimeRaw,
            endTimeRaw: rawSession.endTimeRaw,
            ipAddress: rawSession.ipAddress,
            countryCode: rawSession.countryCode,
            language: rawSession.language,
            messagesSent: rawSession.messagesSent,
            sentimentRaw: rawSession.sentimentRaw,
            escalatedRaw: rawSession.escalatedRaw,
            forwardedHrRaw: rawSession.forwardedHrRaw,
            fullTranscriptUrl: rawSession.fullTranscriptUrl,
            avgResponseTimeSeconds: rawSession.avgResponseTimeSeconds,
            tokens: rawSession.tokens,
            tokensEur: rawSession.tokensEur,
            category: rawSession.category,
            initialMessage: rawSession.initialMessage,
            // Status tracking now handled by ProcessingStatusManager
          },
        });
        importedCount++;
      } catch (error) {
        // Log individual session import errors but continue processing
        process.stderr.write(
          `Failed to import session ${rawSession.externalSessionId}: ${error}\n`
        );
      }
    }

    // Immediately process the queued imports to create Session records
    console.log('[Refresh API] Processing queued imports...');
    await processQueuedImports(100); // Process up to 100 imports immediately

    // Count how many sessions were created
    const sessionCount = await prisma.session.count({
      where: { companyId: company.id }
    });

    return NextResponse.json({
      ok: true,
      imported: importedCount,
      total: rawSessionData.length,
      sessions: sessionCount,
      message: `Successfully imported ${importedCount} records and processed them into sessions. Total sessions: ${sessionCount}`
    });
  } catch (e) {
    const error = e instanceof Error ? e.message : "An unknown error occurred";
    return NextResponse.json({ error }, { status: 500 });
  }
}
105  app/api/admin/trigger-processing/route.ts  Normal file

@@ -0,0 +1,105 @@
import { NextRequest, NextResponse } from "next/server";
import { getServerSession } from "next-auth";
import { authOptions } from "../../auth/[...nextauth]/route";
import { prisma } from "../../../../lib/prisma";
import { processUnprocessedSessions } from "../../../../lib/processingScheduler";
import { ProcessingStatusManager } from "../../../../lib/processingStatusManager";
import { ProcessingStage } from "@prisma/client";

interface SessionUser {
  email: string;
  name?: string;
}

interface SessionData {
  user: SessionUser;
}

export async function POST(request: NextRequest) {
  const session = (await getServerSession(authOptions)) as SessionData | null;

  if (!session?.user) {
    return NextResponse.json({ error: "Not logged in" }, { status: 401 });
  }

  const user = await prisma.user.findUnique({
    where: { email: session.user.email },
    include: { company: true },
  });

  if (!user) {
    return NextResponse.json({ error: "No user found" }, { status: 401 });
  }

  // Check if user has ADMIN role
  if (user.role !== "ADMIN") {
    return NextResponse.json(
      { error: "Admin access required" },
      { status: 403 }
    );
  }

  try {
    // Get optional parameters from request body
    const body = await request.json();
    const { batchSize, maxConcurrency } = body;

    // Validate parameters
    const validatedBatchSize = batchSize && batchSize > 0 ? parseInt(batchSize) : null;
    const validatedMaxConcurrency = maxConcurrency && maxConcurrency > 0 ? parseInt(maxConcurrency) : 5;

    // Check how many sessions need AI processing using the new status system
    const sessionsNeedingAI = await ProcessingStatusManager.getSessionsNeedingProcessing(
      ProcessingStage.AI_ANALYSIS,
      1000 // Get count only
    );

    // Filter to sessions for this company
    const companySessionsNeedingAI = sessionsNeedingAI.filter(
      statusRecord => statusRecord.session.companyId === user.companyId
    );

    const unprocessedCount = companySessionsNeedingAI.length;

    if (unprocessedCount === 0) {
      return NextResponse.json({
        success: true,
        message: "No sessions requiring AI processing found",
        unprocessedCount: 0,
        processedCount: 0,
      });
    }

    // Start processing (this will run asynchronously)
    const startTime = Date.now();

    // Note: We're calling the function but not awaiting it to avoid timeout
    // The processing will continue in the background
    processUnprocessedSessions(validatedBatchSize, validatedMaxConcurrency)
      .then(() => {
        console.log(`[Manual Trigger] Processing completed for company ${user.companyId}`);
      })
      .catch((error) => {
        console.error(`[Manual Trigger] Processing failed for company ${user.companyId}:`, error);
      });

    return NextResponse.json({
      success: true,
      message: `Started processing ${unprocessedCount} unprocessed sessions`,
      unprocessedCount,
      batchSize: validatedBatchSize || unprocessedCount,
      maxConcurrency: validatedMaxConcurrency,
      startedAt: new Date().toISOString(),
    });

  } catch (error) {
    console.error("[Manual Trigger] Error:", error);
    return NextResponse.json(
      {
        error: "Failed to trigger processing",
        details: error instanceof Error ? error.message : String(error),
      },
      { status: 500 }
    );
  }
}
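For reference, a hypothetical admin-side call to this endpoint; the path and the batchSize/maxConcurrency body fields come from the handler above, while the helper itself is only illustrative.

```typescript
// Illustrative client helper for POST /api/admin/trigger-processing (not part of this diff).
async function triggerAiProcessing(batchSize?: number, maxConcurrency?: number) {
  const res = await fetch("/api/admin/trigger-processing", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    // The handler reads optional batchSize and maxConcurrency from the JSON body.
    body: JSON.stringify({ batchSize, maxConcurrency }),
  });

  if (!res.ok) {
    throw new Error(`Trigger failed with status ${res.status}`);
  }

  // On success the route returns { success, message, unprocessedCount, ... }.
  return res.json();
}
```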
@@ -1,6 +1,6 @@
 import NextAuth, { NextAuthOptions } from "next-auth";
 import CredentialsProvider from "next-auth/providers/credentials";
-import { prisma } from "../../../lib/prisma";
+import { prisma } from "../../../../lib/prisma";
 import bcrypt from "bcryptjs";

 // Define the shape of the JWT token
@@ -101,4 +101,6 @@ export const authOptions: NextAuthOptions = {
   debug: process.env.NODE_ENV === "development",
 };

-export default NextAuth(authOptions);
+const handler = NextAuth(authOptions);
+
+export { handler as GET, handler as POST };
51  app/api/dashboard/config/route.ts  Normal file

@@ -0,0 +1,51 @@
import { NextRequest, NextResponse } from "next/server";
import { getServerSession } from "next-auth";
import { prisma } from "../../../../lib/prisma";
import { authOptions } from "../../auth/[...nextauth]/route";

export async function GET(request: NextRequest) {
  const session = await getServerSession(authOptions);
  if (!session?.user) {
    return NextResponse.json({ error: "Not logged in" }, { status: 401 });
  }

  const user = await prisma.user.findUnique({
    where: { email: session.user.email as string },
  });

  if (!user) {
    return NextResponse.json({ error: "No user" }, { status: 401 });
  }

  // Get company data
  const company = await prisma.company.findUnique({
    where: { id: user.companyId },
  });

  return NextResponse.json({ company });
}

export async function POST(request: NextRequest) {
  const session = await getServerSession(authOptions);
  if (!session?.user) {
    return NextResponse.json({ error: "Not logged in" }, { status: 401 });
  }

  const user = await prisma.user.findUnique({
    where: { email: session.user.email as string },
  });

  if (!user) {
    return NextResponse.json({ error: "No user" }, { status: 401 });
  }

  const body = await request.json();
  const { csvUrl } = body;

  await prisma.company.update({
    where: { id: user.companyId },
    data: { csvUrl },
  });

  return NextResponse.json({ ok: true });
}
138  app/api/dashboard/metrics/route.ts  Normal file

@@ -0,0 +1,138 @@
import { NextRequest, NextResponse } from "next/server";
import { getServerSession } from "next-auth";
import { prisma } from "../../../../lib/prisma";
import { sessionMetrics } from "../../../../lib/metrics";
import { authOptions } from "../../auth/[...nextauth]/route";
import { ChatSession } from "../../../../lib/types";

interface SessionUser {
  email: string;
  name?: string;
}

interface SessionData {
  user: SessionUser;
}

export async function GET(request: NextRequest) {
  const session = (await getServerSession(authOptions)) as SessionData | null;
  if (!session?.user) {
    return NextResponse.json({ error: "Not logged in" }, { status: 401 });
  }

  const user = await prisma.user.findUnique({
    where: { email: session.user.email },
    include: { company: true },
  });

  if (!user) {
    return NextResponse.json({ error: "No user" }, { status: 401 });
  }

  // Get date range from query parameters
  const { searchParams } = new URL(request.url);
  const startDate = searchParams.get("startDate");
  const endDate = searchParams.get("endDate");

  // Build where clause with optional date filtering
  const whereClause: any = {
    companyId: user.companyId,
  };

  if (startDate && endDate) {
    whereClause.startTime = {
      gte: new Date(startDate),
      lte: new Date(endDate + 'T23:59:59.999Z'), // Include full end date
    };
  }

  const prismaSessions = await prisma.session.findMany({
    where: whereClause,
    include: {
      messages: true, // Include messages for question extraction
    },
  });

  // Convert Prisma sessions to ChatSession[] type for sessionMetrics
  const chatSessions: ChatSession[] = prismaSessions.map((ps) => ({
    id: ps.id, // Map Prisma's id to ChatSession.id
    sessionId: ps.id, // Map Prisma's id to ChatSession.sessionId
    companyId: ps.companyId,
    startTime: new Date(ps.startTime), // Ensure startTime is a Date object
    endTime: ps.endTime ? new Date(ps.endTime) : null, // Ensure endTime is a Date object or null
    transcriptContent: "", // Session model doesn't have transcriptContent field
    createdAt: new Date(ps.createdAt), // Map Prisma's createdAt
    updatedAt: new Date(ps.createdAt), // Use createdAt for updatedAt as Session model doesn't have updatedAt
    category: ps.category || undefined,
    language: ps.language || undefined,
    country: ps.country || undefined,
    ipAddress: ps.ipAddress || undefined,
    sentiment: ps.sentiment === null ? undefined : ps.sentiment,
    messagesSent: ps.messagesSent === null ? undefined : ps.messagesSent, // Handle null messagesSent
    avgResponseTime:
      ps.avgResponseTime === null ? undefined : ps.avgResponseTime,
    escalated: ps.escalated || false,
    forwardedHr: ps.forwardedHr || false,
    initialMsg: ps.initialMsg || undefined,
    fullTranscriptUrl: ps.fullTranscriptUrl || undefined,
    summary: ps.summary || undefined, // Include summary field
    messages: ps.messages || [], // Include messages for question extraction
    // userId is missing in Prisma Session model, assuming it's not strictly needed for metrics or can be null
    userId: undefined, // Or some other default/mapping if available
  }));

  // Pass company config to metrics
  const companyConfigForMetrics = {
    sentimentAlert:
      user.company.sentimentAlert === null
        ? undefined
        : user.company.sentimentAlert,
  };

  const metrics = sessionMetrics(chatSessions, companyConfigForMetrics);

  // Calculate date range from the FILTERED sessions to match what's actually displayed
  let dateRange: { minDate: string; maxDate: string } | null = null;
  let availableDataRange: { minDate: string; maxDate: string } | null = null;

  // Get the full available range for reference
  const allSessions = await prisma.session.findMany({
    where: {
      companyId: user.companyId,
    },
    select: {
      startTime: true,
    },
    orderBy: {
      startTime: 'asc',
    },
  });

  if (allSessions.length > 0) {
    availableDataRange = {
      minDate: allSessions[0].startTime.toISOString().split('T')[0], // First session date
      maxDate: allSessions[allSessions.length - 1].startTime.toISOString().split('T')[0] // Last session date
    };
  }

  // Calculate date range from the filtered sessions (what's actually being displayed)
  if (prismaSessions.length > 0) {
    const sortedFilteredSessions = prismaSessions.sort((a, b) =>
      new Date(a.startTime).getTime() - new Date(b.startTime).getTime()
    );
    dateRange = {
      minDate: sortedFilteredSessions[0].startTime.toISOString().split('T')[0],
      maxDate: sortedFilteredSessions[sortedFilteredSessions.length - 1].startTime.toISOString().split('T')[0]
    };
  } else if (availableDataRange) {
    // If no filtered sessions but we have available data, use the available range
    dateRange = availableDataRange;
  }

  return NextResponse.json({
    metrics,
    csvUrl: user.company.csvUrl,
    company: user.company,
    dateRange,
  });
}
@@ -1,23 +1,14 @@
-import { NextApiRequest, NextApiResponse } from "next";
+import { NextRequest, NextResponse } from "next/server";
 import { getServerSession } from "next-auth/next";
-import { authOptions } from "../auth/[...nextauth]";
-import { prisma } from "../../../lib/prisma";
-import { SessionFilterOptions } from "../../../lib/types";
+import { authOptions } from "../../auth/[...nextauth]/route";
+import { prisma } from "../../../../lib/prisma";
+import { SessionFilterOptions } from "../../../../lib/types";

-export default async function handler(
-  req: NextApiRequest,
-  res: NextApiResponse<
-    SessionFilterOptions | { error: string; details?: string }
-  >
-) {
-  if (req.method !== "GET") {
-    return res.status(405).json({ error: "Method not allowed" });
-  }
-
-  const authSession = await getServerSession(req, res, authOptions);
+export async function GET(request: NextRequest) {
+  const authSession = await getServerSession(authOptions);

   if (!authSession || !authSession.user?.companyId) {
-    return res.status(401).json({ error: "Unauthorized" });
+    return NextResponse.json({ error: "Unauthorized" }, { status: 401 });
   }

   const companyId = authSession.user.companyId;
@@ -62,15 +53,19 @@ export default async function handler
       .map((s) => s.language)
       .filter(Boolean) as string[]; // Filter out any nulls and assert as string[]

-    return res
-      .status(200)
-      .json({ categories: distinctCategories, languages: distinctLanguages });
+    return NextResponse.json({
+      categories: distinctCategories,
+      languages: distinctLanguages
+    });
   } catch (error) {
     const errorMessage =
       error instanceof Error ? error.message : "An unknown error occurred";
-    return res.status(500).json({
-      error: "Failed to fetch filter options",
-      details: errorMessage,
-    });
+    return NextResponse.json(
+      {
+        error: "Failed to fetch filter options",
+        details: errorMessage,
+      },
+      { status: 500 }
+    );
   }
 }
@@ -1,28 +1,35 @@
-import { NextApiRequest, NextApiResponse } from "next";
-import { prisma } from "../../../../lib/prisma";
-import { ChatSession } from "../../../../lib/types";
+import { NextRequest, NextResponse } from "next/server";
+import { prisma } from "../../../../../lib/prisma";
+import { ChatSession } from "../../../../../lib/types";

-export default async function handler(
-  req: NextApiRequest,
-  res: NextApiResponse
+export async function GET(
+  request: NextRequest,
+  { params }: { params: { id: string } }
 ) {
-  if (req.method !== "GET") {
-    return res.status(405).json({ error: "Method not allowed" });
-  }
-
-  const { id } = req.query;
+  const { id } = params;

-  if (!id || typeof id !== "string") {
-    return res.status(400).json({ error: "Session ID is required" });
+  if (!id) {
+    return NextResponse.json(
+      { error: "Session ID is required" },
+      { status: 400 }
+    );
   }

   try {
     const prismaSession = await prisma.session.findUnique({
       where: { id },
+      include: {
+        messages: {
+          orderBy: { order: "asc" },
+        },
+      },
     });

     if (!prismaSession) {
-      return res.status(404).json({ error: "Session not found" });
+      return NextResponse.json(
+        { error: "Session not found" },
+        { status: 404 }
+      );
     }

     // Map Prisma session object to ChatSession type
@@ -50,19 +57,29 @@ export default async function handler
       avgResponseTime: prismaSession.avgResponseTime ?? null,
       escalated: prismaSession.escalated ?? undefined,
       forwardedHr: prismaSession.forwardedHr ?? undefined,
-      tokens: prismaSession.tokens ?? undefined,
-      tokensEur: prismaSession.tokensEur ?? undefined,
       initialMsg: prismaSession.initialMsg ?? undefined,
       fullTranscriptUrl: prismaSession.fullTranscriptUrl ?? null,
-      transcriptContent: prismaSession.transcriptContent ?? null,
+      summary: prismaSession.summary ?? null, // New field
+      transcriptContent: null, // Not available in Session model
+      messages:
+        prismaSession.messages?.map((msg) => ({
+          id: msg.id,
+          sessionId: msg.sessionId,
+          timestamp: msg.timestamp ? new Date(msg.timestamp) : new Date(),
+          role: msg.role,
+          content: msg.content,
+          order: msg.order,
+          createdAt: new Date(msg.createdAt),
+        })) ?? [], // New field - parsed messages
     };

-    return res.status(200).json({ session });
+    return NextResponse.json({ session });
   } catch (error) {
     const errorMessage =
       error instanceof Error ? error.message : "An unknown error occurred";
-    return res
-      .status(500)
-      .json({ error: "Failed to fetch session", details: errorMessage });
+    return NextResponse.json(
+      { error: "Failed to fetch session", details: errorMessage },
+      { status: 500 }
+    );
   }
 }
@@ -1,40 +1,33 @@
-import { NextApiRequest, NextApiResponse } from "next";
+import { NextRequest, NextResponse } from "next/server";
 import { getServerSession } from "next-auth/next";
-import { authOptions } from "../auth/[...nextauth]";
-import { prisma } from "../../../lib/prisma";
+import { authOptions } from "../../auth/[...nextauth]/route";
+import { prisma } from "../../../../lib/prisma";
 import {
   ChatSession,
   SessionApiResponse,
   SessionQuery,
-} from "../../../lib/types";
+} from "../../../../lib/types";
 import { Prisma } from "@prisma/client";

-export default async function handler(
-  req: NextApiRequest,
-  res: NextApiResponse<SessionApiResponse | { error: string; details?: string }>
-) {
-  if (req.method !== "GET") {
-    return res.status(405).json({ error: "Method not allowed" });
-  }
-
-  const authSession = await getServerSession(req, res, authOptions);
+export async function GET(request: NextRequest) {
+  const authSession = await getServerSession(authOptions);

   if (!authSession || !authSession.user?.companyId) {
-    return res.status(401).json({ error: "Unauthorized" });
+    return NextResponse.json({ error: "Unauthorized" }, { status: 401 });
   }

   const companyId = authSession.user.companyId;
-  const {
-    searchTerm,
-    category,
-    language,
-    startDate,
-    endDate,
-    sortKey,
-    sortOrder,
-    page: queryPage,
-    pageSize: queryPageSize,
-  } = req.query as SessionQuery;
+  const { searchParams } = new URL(request.url);
+
+  const searchTerm = searchParams.get("searchTerm");
+  const category = searchParams.get("category");
+  const language = searchParams.get("language");
+  const startDate = searchParams.get("startDate");
+  const endDate = searchParams.get("endDate");
+  const sortKey = searchParams.get("sortKey");
+  const sortOrder = searchParams.get("sortOrder");
+  const queryPage = searchParams.get("page");
+  const queryPageSize = searchParams.get("pageSize");

   const page = Number(queryPage) || 1;
   const pageSize = Number(queryPageSize) || 10;
@@ -43,38 +36,34 @@ export default async function handler
   const whereClause: Prisma.SessionWhereInput = { companyId };

   // Search Term
-  if (
-    searchTerm &&
-    typeof searchTerm === "string" &&
-    searchTerm.trim() !== ""
-  ) {
+  if (searchTerm && searchTerm.trim() !== "") {
     const searchConditions = [
       { id: { contains: searchTerm } },
-      { category: { contains: searchTerm } },
       { initialMsg: { contains: searchTerm } },
-      { transcriptContent: { contains: searchTerm } },
+      { summary: { contains: searchTerm } },
     ];
     whereClause.OR = searchConditions;
   }

   // Category Filter
-  if (category && typeof category === "string" && category.trim() !== "") {
-    whereClause.category = category;
+  if (category && category.trim() !== "") {
+    // Cast to SessionCategory enum if it's a valid value
+    whereClause.category = category as any;
   }

   // Language Filter
-  if (language && typeof language === "string" && language.trim() !== "") {
+  if (language && language.trim() !== "") {
     whereClause.language = language;
   }

   // Date Range Filter
-  if (startDate && typeof startDate === "string") {
+  if (startDate) {
     whereClause.startTime = {
       ...((whereClause.startTime as object) || {}),
       gte: new Date(startDate),
     };
   }
-  if (endDate && typeof endDate === "string") {
+  if (endDate) {
     const inclusiveEndDate = new Date(endDate);
     inclusiveEndDate.setDate(inclusiveEndDate.getDate() + 1);
     whereClause.startTime = {
@@ -98,7 +87,7 @@ export default async function handler
     | Prisma.SessionOrderByWithRelationInput[];

   const primarySortField =
-    sortKey && typeof sortKey === "string" && validSortKeys[sortKey]
+    sortKey && validSortKeys[sortKey]
       ? validSortKeys[sortKey]
       : "startTime"; // Default to startTime field if sortKey is invalid/missing

@@ -115,9 +104,6 @@ export default async function handler
       { startTime: "desc" },
     ];
   }
-  // Note: If sortKey was initially undefined or invalid, primarySortField defaults to "startTime",
-  // and primarySortOrder defaults to "desc". This makes orderByCondition = { startTime: "desc" },
-  // which is the correct overall default sort.

   const prismaSessions = await prisma.session.findMany({
     where: whereClause,
@@ -146,19 +132,18 @@ export default async function handler
       avgResponseTime: ps.avgResponseTime ?? null,
       escalated: ps.escalated ?? undefined,
       forwardedHr: ps.forwardedHr ?? undefined,
-      tokens: ps.tokens ?? undefined,
-      tokensEur: ps.tokensEur ?? undefined,
       initialMsg: ps.initialMsg ?? undefined,
       fullTranscriptUrl: ps.fullTranscriptUrl ?? null,
-      transcriptContent: ps.transcriptContent ?? null,
+      transcriptContent: null, // Transcript content is now fetched from fullTranscriptUrl when needed
     }));

-    return res.status(200).json({ sessions, totalSessions });
+    return NextResponse.json({ sessions, totalSessions });
   } catch (error) {
     const errorMessage =
       error instanceof Error ? error.message : "An unknown error occurred";
-    return res
-      .status(500)
-      .json({ error: "Failed to fetch sessions", details: errorMessage });
+    return NextResponse.json(
+      { error: "Failed to fetch sessions", details: errorMessage },
+      { status: 500 }
+    );
   }
 }
36  app/api/dashboard/settings/route.ts  Normal file

@@ -0,0 +1,36 @@
import { NextRequest, NextResponse } from "next/server";
import { getServerSession } from "next-auth";
import { prisma } from "../../../../lib/prisma";
import { authOptions } from "../../auth/[...nextauth]/route";

export async function POST(request: NextRequest) {
  const session = await getServerSession(authOptions);
  if (!session?.user || session.user.role !== "ADMIN") {
    return NextResponse.json({ error: "Forbidden" }, { status: 403 });
  }

  const user = await prisma.user.findUnique({
    where: { email: session.user.email as string },
  });

  if (!user) {
    return NextResponse.json({ error: "No user" }, { status: 401 });
  }

  const body = await request.json();
  const { csvUrl, csvUsername, csvPassword, sentimentThreshold } = body;

  await prisma.company.update({
    where: { id: user.companyId },
    data: {
      csvUrl,
      csvUsername,
      ...(csvPassword ? { csvPassword } : {}),
      sentimentAlert: sentimentThreshold
        ? parseFloat(sentimentThreshold)
        : null,
    },
  });

  return NextResponse.json({ ok: true });
}
80  app/api/dashboard/users/route.ts  Normal file

@@ -0,0 +1,80 @@
import { NextRequest, NextResponse } from "next/server";
import crypto from "crypto";
import { getServerSession } from "next-auth";
import { prisma } from "../../../../lib/prisma";
import bcrypt from "bcryptjs";
import { authOptions } from "../../auth/[...nextauth]/route";

interface UserBasicInfo {
  id: string;
  email: string;
  role: string;
}

export async function GET(request: NextRequest) {
  const session = await getServerSession(authOptions);
  if (!session?.user || session.user.role !== "ADMIN") {
    return NextResponse.json({ error: "Forbidden" }, { status: 403 });
  }

  const user = await prisma.user.findUnique({
    where: { email: session.user.email as string },
  });

  if (!user) {
    return NextResponse.json({ error: "No user" }, { status: 401 });
  }

  const users = await prisma.user.findMany({
    where: { companyId: user.companyId },
  });

  const mappedUsers: UserBasicInfo[] = users.map((u) => ({
    id: u.id,
    email: u.email,
    role: u.role,
  }));

  return NextResponse.json({ users: mappedUsers });
}

export async function POST(request: NextRequest) {
  const session = await getServerSession(authOptions);
  if (!session?.user || session.user.role !== "ADMIN") {
    return NextResponse.json({ error: "Forbidden" }, { status: 403 });
  }

  const user = await prisma.user.findUnique({
    where: { email: session.user.email as string },
  });

  if (!user) {
    return NextResponse.json({ error: "No user" }, { status: 401 });
  }

  const body = await request.json();
  const { email, role } = body;

  if (!email || !role) {
    return NextResponse.json({ error: "Missing fields" }, { status: 400 });
  }

  const exists = await prisma.user.findUnique({ where: { email } });
  if (exists) {
    return NextResponse.json({ error: "Email exists" }, { status: 409 });
  }

  const tempPassword = crypto.randomBytes(12).toString("base64").slice(0, 12); // secure random initial password

  await prisma.user.create({
    data: {
      email,
      password: await bcrypt.hash(tempPassword, 10),
      companyId: user.companyId,
      role,
    },
  });

  // TODO: Email user their temp password (stub, for demo) - Implement a robust and secure email sending mechanism. Consider using a transactional email service.
  return NextResponse.json({ ok: true, tempPassword });
}
28  app/api/forgot-password/route.ts  Normal file

@@ -0,0 +1,28 @@
import { NextRequest, NextResponse } from "next/server";
import { prisma } from "../../../lib/prisma";
import { sendEmail } from "../../../lib/sendEmail";
import crypto from "crypto";

export async function POST(request: NextRequest) {
  const body = await request.json();
  const { email } = body as { email: string };

  const user = await prisma.user.findUnique({ where: { email } });
  if (!user) {
    // Always return 200 for privacy (don't reveal if email exists)
    return NextResponse.json({ success: true }, { status: 200 });
  }

  const token = crypto.randomBytes(32).toString("hex");
  const expiry = new Date(Date.now() + 1000 * 60 * 30); // 30 min expiry

  await prisma.user.update({
    where: { email },
    data: { resetToken: token, resetTokenExpiry: expiry },
  });

  const resetUrl = `${process.env.NEXTAUTH_URL || "http://localhost:3000"}/reset-password?token=${token}`;
  await sendEmail(email, "Password Reset", `Reset your password: ${resetUrl}`);

  return NextResponse.json({ success: true }, { status: 200 });
}
63  app/api/register/route.ts  Normal file

@@ -0,0 +1,63 @@
import { NextRequest, NextResponse } from "next/server";
import { prisma } from "../../../lib/prisma";
import bcrypt from "bcryptjs";

interface RegisterRequestBody {
  email: string;
  password: string;
  company: string;
  csvUrl?: string;
}

export async function POST(request: NextRequest) {
  const body = await request.json();
  const { email, password, company, csvUrl } = body as RegisterRequestBody;

  if (!email || !password || !company) {
    return NextResponse.json(
      {
        success: false,
        error: "Missing required fields",
      },
      { status: 400 }
    );
  }

  // Check if email exists
  const exists = await prisma.user.findUnique({
    where: { email },
  });

  if (exists) {
    return NextResponse.json(
      {
        success: false,
        error: "Email already exists",
      },
      { status: 409 }
    );
  }

  const newCompany = await prisma.company.create({
    data: { name: company, csvUrl: csvUrl || "" },
  });

  const hashed = await bcrypt.hash(password, 10);

  await prisma.user.create({
    data: {
      email,
      password: hashed,
      companyId: newCompany.id,
      role: "ADMIN",
    },
  });

  return NextResponse.json(
    {
      success: true,
      data: { success: true },
    },
    { status: 201 }
  );
}
63  app/api/reset-password/route.ts  Normal file

@@ -0,0 +1,63 @@
import { NextRequest, NextResponse } from "next/server";
import { prisma } from "../../../lib/prisma";
import bcrypt from "bcryptjs";

export async function POST(request: NextRequest) {
  const body = await request.json();
  const { token, password } = body as { token?: string; password?: string };

  if (!token || !password) {
    return NextResponse.json(
      { error: "Token and password are required." },
      { status: 400 }
    );
  }

  if (password.length < 8) {
    return NextResponse.json(
      { error: "Password must be at least 8 characters long." },
      { status: 400 }
    );
  }

  try {
    const user = await prisma.user.findFirst({
      where: {
        resetToken: token,
        resetTokenExpiry: { gte: new Date() },
      },
    });

    if (!user) {
      return NextResponse.json(
        {
          error: "Invalid or expired token. Please request a new password reset.",
        },
        { status: 400 }
      );
    }

    const hash = await bcrypt.hash(password, 10);
    await prisma.user.update({
      where: { id: user.id },
      data: {
        password: hash,
        resetToken: null,
        resetTokenExpiry: null,
      },
    });

    return NextResponse.json(
      { message: "Password has been reset successfully." },
      { status: 200 }
    );
  } catch (error) {
    console.error("Reset password error:", error);
    return NextResponse.json(
      {
        error: "An internal server error occurred. Please try again later.",
      },
      { status: 500 }
    );
  }
}
@@ -77,8 +77,8 @@ export default function CompanySettingsPage() {
     return <div className="text-center py-10">Loading settings...</div>;
   }

-  // Check for admin access
-  if (session?.user?.role !== "admin") {
+  // Check for ADMIN access
+  if (session?.user?.role !== "ADMIN") {
     return (
       <div className="text-center py-10 bg-white rounded-xl shadow p-6">
         <h2 className="font-bold text-xl text-red-600 mb-2">Access Denied</h2>
@ -1,66 +1,117 @@
|
|||||||
"use client";
|
"use client";
|
||||||
|
|
||||||
import { useEffect, useState } from "react";
|
import { useEffect, useState, useCallback, useRef } from "react";
|
||||||
import { signOut, useSession } from "next-auth/react";
|
import { signOut, useSession } from "next-auth/react";
|
||||||
import { useRouter } from "next/navigation";
|
import { useRouter } from "next/navigation";
|
||||||
import {
|
|
||||||
SessionsLineChart,
|
|
||||||
CategoriesBarChart,
|
|
||||||
LanguagePieChart,
|
|
||||||
TokenUsageChart,
|
|
||||||
} from "../../../components/Charts";
|
|
||||||
import { Company, MetricsResult, WordCloudWord } from "../../../lib/types";
|
import { Company, MetricsResult, WordCloudWord } from "../../../lib/types";
|
||||||
import MetricCard from "../../../components/MetricCard";
|
import MetricCard from "../../../components/ui/metric-card";
|
||||||
import DonutChart from "../../../components/DonutChart";
|
import ModernLineChart from "../../../components/charts/line-chart";
|
||||||
|
import ModernBarChart from "../../../components/charts/bar-chart";
|
||||||
|
import ModernDonutChart from "../../../components/charts/donut-chart";
|
||||||
|
import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card";
|
||||||
|
import { Button } from "@/components/ui/button";
|
||||||
|
import { Badge } from "@/components/ui/badge";
|
||||||
|
import { Skeleton } from "@/components/ui/skeleton";
|
||||||
|
import { Separator } from "@/components/ui/separator";
|
||||||
|
import {
|
||||||
|
DropdownMenu,
|
||||||
|
DropdownMenuContent,
|
||||||
|
DropdownMenuItem,
|
||||||
|
DropdownMenuTrigger,
|
||||||
|
} from "@/components/ui/dropdown-menu";
|
||||||
|
import {
|
||||||
|
MessageSquare,
|
||||||
|
Users,
|
||||||
|
Clock,
|
||||||
|
Zap,
|
||||||
|
Euro,
|
||||||
|
TrendingUp,
|
||||||
|
CheckCircle,
|
||||||
|
RefreshCw,
|
||||||
|
LogOut,
|
||||||
|
Calendar,
|
||||||
|
MoreVertical,
|
||||||
|
Globe,
|
||||||
|
MessageCircle,
|
||||||
|
} from "lucide-react";
|
||||||
import WordCloud from "../../../components/WordCloud";
|
import WordCloud from "../../../components/WordCloud";
|
||||||
import GeographicMap from "../../../components/GeographicMap";
|
import GeographicMap from "../../../components/GeographicMap";
|
||||||
import ResponseTimeDistribution from "../../../components/ResponseTimeDistribution";
|
import ResponseTimeDistribution from "../../../components/ResponseTimeDistribution";
|
||||||
import WelcomeBanner from "../../../components/WelcomeBanner";
|
import DateRangePicker from "../../../components/DateRangePicker";
|
||||||
|
import TopQuestionsChart from "../../../components/TopQuestionsChart";
|
||||||
|
|
||||||
// Safely wrapped component with useSession
|
// Safely wrapped component with useSession
|
||||||
function DashboardContent() {
|
function DashboardContent() {
|
||||||
const { data: session, status } = useSession(); // Add status from useSession
|
const { data: session, status } = useSession();
|
||||||
const router = useRouter(); // Initialize useRouter
|
const router = useRouter();
|
||||||
const [metrics, setMetrics] = useState<MetricsResult | null>(null);
|
const [metrics, setMetrics] = useState<MetricsResult | null>(null);
|
||||||
const [company, setCompany] = useState<Company | null>(null);
|
const [company, setCompany] = useState<Company | null>(null);
|
||||||
const [, setLoading] = useState<boolean>(false);
|
const [loading, setLoading] = useState<boolean>(false);
|
||||||
const [refreshing, setRefreshing] = useState<boolean>(false);
|
const [refreshing, setRefreshing] = useState<boolean>(false);
|
||||||
|
const [dateRange, setDateRange] = useState<{ minDate: string; maxDate: string } | null>(null);
|
||||||
|
const [selectedStartDate, setSelectedStartDate] = useState<string>("");
|
||||||
|
const [selectedEndDate, setSelectedEndDate] = useState<string>("");
|
||||||
|
const [isInitialLoad, setIsInitialLoad] = useState<boolean>(true);
|
||||||
|
|
||||||
const isAuditor = session?.user?.role === "auditor";
|
const isAuditor = session?.user?.role === "AUDITOR";
|
||||||
|
|
||||||
+
+  // Function to fetch metrics with optional date range
+  const fetchMetrics = async (startDate?: string, endDate?: string, isInitial = false) => {
+    setLoading(true);
+    try {
+      let url = "/api/dashboard/metrics";
+      if (startDate && endDate) {
+        url += `?startDate=${startDate}&endDate=${endDate}`;
+      }
+
+      const res = await fetch(url);
+      const data = await res.json();
+
+      setMetrics(data.metrics);
+      setCompany(data.company);
+
+      // Set date range from API response (only on initial load)
+      if (data.dateRange && isInitial) {
+        setDateRange(data.dateRange);
+        setSelectedStartDate(data.dateRange.minDate);
+        setSelectedEndDate(data.dateRange.maxDate);
+        setIsInitialLoad(false);
+      }
+    } catch (error) {
+      console.error("Error fetching metrics:", error);
+    } finally {
+      setLoading(false);
+    }
+  };
+
+  // Handle date range changes with proper memoization
+  const handleDateRangeChange = useCallback((startDate: string, endDate: string) => {
+    // Only update if dates actually changed to prevent unnecessary API calls
+    if (startDate !== selectedStartDate || endDate !== selectedEndDate) {
+      setSelectedStartDate(startDate);
+      setSelectedEndDate(endDate);
+      fetchMetrics(startDate, endDate);
+    }
+  }, [selectedStartDate, selectedEndDate]);

  useEffect(() => {
    // Redirect if not authenticated
    if (status === "unauthenticated") {
      router.push("/login");
-      return; // Stop further execution in this effect
+      return;
    }

    // Fetch metrics and company on mount if authenticated
-    if (status === "authenticated") {
-      const fetchData = async () => {
-        setLoading(true);
-        const res = await fetch("/api/dashboard/metrics");
-        const data = await res.json();
-        console.log("Metrics from API:", {
-          avgSessionLength: data.metrics.avgSessionLength,
-          avgSessionTimeTrend: data.metrics.avgSessionTimeTrend,
-          totalSessionDuration: data.metrics.totalSessionDuration,
-          validSessionsForDuration: data.metrics.validSessionsForDuration,
-        });
-        setMetrics(data.metrics);
-        setCompany(data.company);
-        setLoading(false);
-      };
-      fetchData();
-    }
-  }, [status, router]); // Add status and router to dependency array
+    if (status === "authenticated" && isInitialLoad) {
+      fetchMetrics(undefined, undefined, true);
+    }
+  }, [status, router, isInitialLoad]);
|
|
||||||
async function handleRefresh() {
|
async function handleRefresh() {
|
||||||
if (isAuditor) return; // Prevent auditors from refreshing
|
if (isAuditor) return;
|
||||||
try {
|
try {
|
||||||
setRefreshing(true);
|
setRefreshing(true);
|
||||||
|
|
||||||
// Make sure we have a company ID to send
|
|
||||||
if (!company?.id) {
|
if (!company?.id) {
|
||||||
setRefreshing(false);
|
setRefreshing(false);
|
||||||
alert("Cannot refresh: Company ID is missing");
|
alert("Cannot refresh: Company ID is missing");
|
||||||
@ -74,7 +125,6 @@ function DashboardContent() {
|
|||||||
});
|
});
|
||||||
|
|
||||||
if (res.ok) {
|
if (res.ok) {
|
||||||
// Refetch metrics
|
|
||||||
const metricsRes = await fetch("/api/dashboard/metrics");
|
const metricsRes = await fetch("/api/dashboard/metrics");
|
||||||
const data = await metricsRes.json();
|
const data = await metricsRes.json();
|
||||||
setMetrics(data.metrics);
|
setMetrics(data.metrics);
|
||||||
@ -87,70 +137,129 @@ function DashboardContent() {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Calculate sentiment distribution
|
|
||||||
const getSentimentData = () => {
|
|
||||||
if (!metrics) return { positive: 0, neutral: 0, negative: 0 };
|
|
||||||
|
|
||||||
if (
|
|
||||||
metrics.sentimentPositiveCount !== undefined &&
|
|
||||||
metrics.sentimentNeutralCount !== undefined &&
|
|
||||||
metrics.sentimentNegativeCount !== undefined
|
|
||||||
) {
|
|
||||||
return {
|
|
||||||
positive: metrics.sentimentPositiveCount,
|
|
||||||
neutral: metrics.sentimentNeutralCount,
|
|
||||||
negative: metrics.sentimentNegativeCount,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
const total = metrics.totalSessions || 1;
|
|
||||||
return {
|
|
||||||
positive: Math.round(total * 0.6),
|
|
||||||
neutral: Math.round(total * 0.3),
|
|
||||||
negative: Math.round(total * 0.1),
|
|
||||||
};
|
|
||||||
};
|
|
||||||
|
|
||||||
// Prepare token usage data
|
|
||||||
const getTokenData = () => {
|
|
||||||
if (!metrics || !metrics.tokensByDay) {
|
|
||||||
return { labels: [], values: [], costs: [] };
|
|
||||||
}
|
|
||||||
|
|
||||||
const days = Object.keys(metrics.tokensByDay).sort();
|
|
||||||
const labels = days.slice(-7);
|
|
||||||
const values = labels.map((day) => metrics.tokensByDay?.[day] || 0);
|
|
||||||
const costs = labels.map((day) => metrics.tokensCostByDay?.[day] || 0);
|
|
||||||
|
|
||||||
return { labels, values, costs };
|
|
||||||
};
|
|
||||||
|
|
||||||
// Show loading state while session status is being determined
|
// Show loading state while session status is being determined
|
||||||
if (status === "loading") {
|
if (status === "loading") {
|
||||||
return <div className="text-center py-10">Loading session...</div>;
|
return (
|
||||||
|
<div className="flex items-center justify-center min-h-[60vh]">
|
||||||
|
<div className="text-center space-y-4">
|
||||||
|
<div className="animate-spin rounded-full h-12 w-12 border-b-2 border-primary mx-auto"></div>
|
||||||
|
<p className="text-muted-foreground">Loading session...</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
// If unauthenticated and not redirected yet (should be handled by useEffect, but as a fallback)
|
|
||||||
if (status === "unauthenticated") {
|
if (status === "unauthenticated") {
|
||||||
return <div className="text-center py-10">Redirecting to login...</div>;
|
return (
|
||||||
|
<div className="flex items-center justify-center min-h-[60vh]">
|
||||||
|
<div className="text-center">
|
||||||
|
<p className="text-muted-foreground">Redirecting to login...</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (!metrics || !company) {
|
if (loading || !metrics || !company) {
|
||||||
return <div className="text-center py-10">Loading dashboard...</div>;
|
return (
|
||||||
|
<div className="space-y-8">
|
||||||
|
{/* Header Skeleton */}
|
||||||
|
<Card>
|
||||||
|
<CardHeader>
|
||||||
|
<div className="flex justify-between items-start">
|
||||||
|
<div className="space-y-2">
|
||||||
|
<Skeleton className="h-8 w-48" />
|
||||||
|
<Skeleton className="h-4 w-64" />
|
||||||
|
</div>
|
||||||
|
<div className="flex gap-2">
|
||||||
|
<Skeleton className="h-10 w-24" />
|
||||||
|
<Skeleton className="h-10 w-20" />
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</CardHeader>
|
||||||
|
</Card>
|
||||||
|
|
||||||
|
{/* Metrics Grid Skeleton */}
|
||||||
|
<div className="grid grid-cols-1 sm:grid-cols-2 lg:grid-cols-4 gap-6">
|
||||||
|
{Array.from({ length: 8 }).map((_, i) => (
|
||||||
|
<MetricCard key={i} title="" value="" isLoading />
|
||||||
|
))}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Charts Skeleton */}
|
||||||
|
<div className="grid grid-cols-1 lg:grid-cols-3 gap-6">
|
||||||
|
<Card className="lg:col-span-2">
|
||||||
|
<CardHeader>
|
||||||
|
<Skeleton className="h-6 w-32" />
|
||||||
|
</CardHeader>
|
||||||
|
<CardContent>
|
||||||
|
<Skeleton className="h-64 w-full" />
|
||||||
|
</CardContent>
|
||||||
|
</Card>
|
||||||
|
<Card>
|
||||||
|
<CardHeader>
|
||||||
|
<Skeleton className="h-6 w-32" />
|
||||||
|
</CardHeader>
|
||||||
|
<CardContent>
|
||||||
|
<Skeleton className="h-64 w-full" />
|
||||||
|
</CardContent>
|
||||||
|
</Card>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Function to prepare word cloud data from metrics.wordCloudData
|
// Data preparation functions
|
||||||
|
const getSentimentData = () => {
|
||||||
|
if (!metrics) return [];
|
||||||
|
|
||||||
|
const sentimentData = {
|
||||||
|
positive: metrics.sentimentPositiveCount ?? 0,
|
||||||
|
neutral: metrics.sentimentNeutralCount ?? 0,
|
||||||
|
negative: metrics.sentimentNegativeCount ?? 0,
|
||||||
|
};
|
||||||
|
|
||||||
|
return [
|
||||||
|
{ name: "Positive", value: sentimentData.positive, color: "rgb(34, 197, 94)" },
|
||||||
|
{ name: "Neutral", value: sentimentData.neutral, color: "rgb(168, 162, 158)" },
|
||||||
|
{ name: "Negative", value: sentimentData.negative, color: "rgb(239, 68, 68)" },
|
||||||
|
];
|
||||||
|
};
|
||||||
|
|
||||||
|
const getSessionsOverTimeData = () => {
|
||||||
|
if (!metrics?.days) return [];
|
||||||
|
|
||||||
|
return Object.entries(metrics.days).map(([date, value]) => ({
|
||||||
|
date: new Date(date).toLocaleDateString('en-US', { month: 'short', day: 'numeric' }),
|
||||||
|
value: value as number,
|
||||||
|
}));
|
||||||
|
};
|
||||||
|
|
||||||
|
const getCategoriesData = () => {
|
||||||
|
if (!metrics?.categories) return [];
|
||||||
|
|
||||||
|
return Object.entries(metrics.categories).map(([name, value]) => ({
|
||||||
|
name: name.length > 15 ? name.substring(0, 15) + '...' : name,
|
||||||
|
value: value as number,
|
||||||
|
}));
|
||||||
|
};
|
||||||
|
|
||||||
|
const getLanguagesData = () => {
|
||||||
|
if (!metrics?.languages) return [];
|
||||||
|
|
||||||
|
return Object.entries(metrics.languages).map(([name, value]) => ({
|
||||||
|
name,
|
||||||
|
value: value as number,
|
||||||
|
}));
|
||||||
|
};
|
||||||
|
|
||||||
const getWordCloudData = (): WordCloudWord[] => {
|
const getWordCloudData = (): WordCloudWord[] => {
|
||||||
if (!metrics || !metrics.wordCloudData) return [];
|
if (!metrics?.wordCloudData) return [];
|
||||||
return metrics.wordCloudData;
|
return metrics.wordCloudData;
|
||||||
};
|
};
|
||||||
|
|
||||||
// Function to prepare country data for the map using actual metrics
|
|
||||||
const getCountryData = () => {
|
const getCountryData = () => {
|
||||||
if (!metrics || !metrics.countries) return {};
|
if (!metrics?.countries) return {};
|
||||||
|
return Object.entries(metrics.countries).reduce(
|
||||||
// Convert the countries object from metrics to the format expected by GeographicMap
|
|
||||||
const result = Object.entries(metrics.countries).reduce(
|
|
||||||
(acc, [code, count]) => {
|
(acc, [code, count]) => {
|
||||||
if (code && count) {
|
if (code && count) {
|
||||||
acc[code] = count;
|
acc[code] = count;
|
||||||
@ -159,11 +268,8 @@ function DashboardContent() {
|
|||||||
},
|
},
|
||||||
{} as Record<string, number>
|
{} as Record<string, number>
|
||||||
);
|
);
|
||||||
|
|
||||||
return result;
|
|
||||||
};
|
};
|
||||||
|
|
||||||
// Function to prepare response time distribution data
|
|
||||||
const getResponseTimeData = () => {
|
const getResponseTimeData = () => {
|
||||||
const avgTime = metrics.avgResponseTime || 1.5;
|
const avgTime = metrics.avgResponseTime || 1.5;
|
||||||
const simulatedData: number[] = [];
|
const simulatedData: number[] = [];
|
||||||
@ -178,252 +284,318 @@ function DashboardContent() {
|
|||||||
|
|
||||||
return (
|
return (
|
||||||
<div className="space-y-8">
|
<div className="space-y-8">
|
||||||
<WelcomeBanner companyName={company.name} />
|
{/* Apple-Style Unified Header */}
|
||||||
<div className="flex flex-col sm:flex-row justify-between items-start sm:items-center bg-white p-6 rounded-2xl shadow-lg ring-1 ring-slate-200/50">
|
<Card className="border-0 bg-white shadow-sm">
|
||||||
<div>
|
<CardHeader className="pb-6">
|
||||||
<h1 className="text-3xl font-bold text-slate-800">{company.name}</h1>
|
<div className="flex flex-col space-y-6">
|
||||||
<p className="text-slate-500 mt-1">
|
{/* Top row: Company info and actions */}
|
||||||
Dashboard updated{" "}
|
<div className="flex flex-col sm:flex-row justify-between items-start sm:items-center gap-4">
|
||||||
<span className="font-medium text-slate-600">
|
<div className="space-y-1">
|
||||||
{new Date(metrics.lastUpdated || Date.now()).toLocaleString()}
|
<div className="flex items-center gap-3">
|
||||||
</span>
|
<h1 className="text-2xl font-semibold text-gray-900 tracking-tight">{company.name}</h1>
|
||||||
</p>
|
<Badge variant="secondary" className="text-xs font-medium bg-gray-100 text-gray-700 border-0">
|
||||||
</div>
|
Analytics Dashboard
|
||||||
<div className="flex items-center gap-3 mt-4 sm:mt-0">
|
</Badge>
|
||||||
<button
|
</div>
|
||||||
className="bg-sky-600 text-white py-2 px-5 rounded-lg shadow hover:bg-sky-700 transition-colors disabled:opacity-60 disabled:cursor-not-allowed flex items-center text-sm font-medium"
|
<p className="text-sm text-gray-500">
|
||||||
onClick={handleRefresh}
|
Last updated{" "}
|
||||||
disabled={refreshing || isAuditor}
|
<span className="font-medium text-gray-700">
|
||||||
>
|
{new Date(metrics.lastUpdated || Date.now()).toLocaleString()}
|
||||||
{refreshing ? (
|
</span>
|
||||||
<>
|
</p>
|
||||||
<svg
|
</div>
|
||||||
className="animate-spin -ml-1 mr-2 h-4 w-4 text-white"
|
|
||||||
xmlns="http://www.w3.org/2000/svg"
|
<div className="flex items-center gap-3">
|
||||||
fill="none"
|
<Button
|
||||||
viewBox="0 0 24 24"
|
onClick={handleRefresh}
|
||||||
|
disabled={refreshing || isAuditor}
|
||||||
|
size="sm"
|
||||||
|
className="gap-2 bg-blue-600 hover:bg-blue-700 border-0 shadow-sm"
|
||||||
>
|
>
|
||||||
<circle
|
<RefreshCw className={`h-4 w-4 ${refreshing ? 'animate-spin' : ''}`} />
|
||||||
className="opacity-25"
|
{refreshing ? "Refreshing..." : "Refresh"}
|
||||||
cx="12"
|
</Button>
|
||||||
cy="12"
|
|
||||||
r="10"
|
<DropdownMenu>
|
||||||
stroke="currentColor"
|
<DropdownMenuTrigger asChild>
|
||||||
strokeWidth="4"
|
<Button variant="outline" size="sm" className="border-gray-200 hover:bg-gray-50">
|
||||||
></circle>
|
<MoreVertical className="h-4 w-4" />
|
||||||
<path
|
</Button>
|
||||||
className="opacity-75"
|
</DropdownMenuTrigger>
|
||||||
fill="currentColor"
|
<DropdownMenuContent align="end" className="border-gray-200 shadow-lg">
|
||||||
d="M4 12a8 8 0 018-8V0C5.373 0 0 5.373 0 12h4zm2 5.291A7.962 7.962 0 014 12H0c0 3.042 1.135 5.824 3 7.938l3-2.647z"
|
<DropdownMenuItem onClick={() => signOut({ callbackUrl: "/login" })}>
|
||||||
></path>
|
<LogOut className="h-4 w-4 mr-2" />
|
||||||
</svg>
|
Sign out
|
||||||
Refreshing...
|
</DropdownMenuItem>
|
||||||
</>
|
</DropdownMenuContent>
|
||||||
) : (
|
</DropdownMenu>
|
||||||
"Refresh Data"
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Date Range Controls */}
|
||||||
|
{dateRange && (
|
||||||
|
<div className="border-t border-gray-100 pt-6">
|
||||||
|
<div className="flex flex-col sm:flex-row items-start sm:items-center gap-4">
|
||||||
|
<div className="flex items-center gap-2">
|
||||||
|
<Calendar className="h-4 w-4 text-gray-500" />
|
||||||
|
<span className="text-sm font-medium text-gray-700">Date Range:</span>
|
||||||
|
</div>
|
||||||
|
<div className="flex items-center gap-3">
|
||||||
|
<div className="flex items-center gap-2">
|
||||||
|
<label className="text-sm text-gray-600">From:</label>
|
||||||
|
<input
|
||||||
|
type="date"
|
||||||
|
value={selectedStartDate}
|
||||||
|
min={dateRange.minDate}
|
||||||
|
max={dateRange.maxDate}
|
||||||
|
onChange={(e) => handleDateRangeChange(e.target.value, selectedEndDate)}
|
||||||
|
className="px-3 py-1.5 text-sm border border-gray-200 rounded-lg focus:outline-none focus:ring-2 focus:ring-blue-500 focus:border-transparent"
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
<div className="flex items-center gap-2">
|
||||||
|
<label className="text-sm text-gray-600">To:</label>
|
||||||
|
<input
|
||||||
|
type="date"
|
||||||
|
value={selectedEndDate}
|
||||||
|
min={dateRange.minDate}
|
||||||
|
max={dateRange.maxDate}
|
||||||
|
onChange={(e) => handleDateRangeChange(selectedStartDate, e.target.value)}
|
||||||
|
className="px-3 py-1.5 text-sm border border-gray-200 rounded-lg focus:outline-none focus:ring-2 focus:ring-blue-500 focus:border-transparent"
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
<div className="flex gap-2">
|
||||||
|
<Button
|
||||||
|
variant="outline"
|
||||||
|
size="sm"
|
||||||
|
onClick={() => {
|
||||||
|
const endDate = new Date().toISOString().split('T')[0];
|
||||||
|
const startDate = new Date(Date.now() - 7 * 24 * 60 * 60 * 1000).toISOString().split('T')[0];
|
||||||
|
handleDateRangeChange(startDate, endDate);
|
||||||
|
}}
|
||||||
|
className="text-xs border-gray-200 hover:bg-gray-50"
|
||||||
|
>
|
||||||
|
Last 7 days
|
||||||
|
</Button>
|
||||||
|
<Button
|
||||||
|
variant="outline"
|
||||||
|
size="sm"
|
||||||
|
onClick={() => {
|
||||||
|
const endDate = new Date().toISOString().split('T')[0];
|
||||||
|
const startDate = new Date(Date.now() - 30 * 24 * 60 * 60 * 1000).toISOString().split('T')[0];
|
||||||
|
handleDateRangeChange(startDate, endDate);
|
||||||
|
}}
|
||||||
|
className="text-xs border-gray-200 hover:bg-gray-50"
|
||||||
|
>
|
||||||
|
Last 30 days
|
||||||
|
</Button>
|
||||||
|
<Button
|
||||||
|
variant="outline"
|
||||||
|
size="sm"
|
||||||
|
onClick={() => handleDateRangeChange(dateRange.minDate, dateRange.maxDate)}
|
||||||
|
className="text-xs border-gray-200 hover:bg-gray-50"
|
||||||
|
>
|
||||||
|
All time
|
||||||
|
</Button>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<p className="text-xs text-gray-500 mt-2">
|
||||||
|
Available data: {new Date(dateRange.minDate).toLocaleDateString()} - {new Date(dateRange.maxDate).toLocaleDateString()}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
)}
|
)}
|
||||||
</button>
|
</div>
|
||||||
<button
|
</CardHeader>
|
||||||
className="bg-slate-100 text-slate-700 py-2 px-5 rounded-lg shadow hover:bg-slate-200 transition-colors flex items-center text-sm font-medium"
|
</Card>
|
||||||
onClick={() => signOut({ callbackUrl: "/login" })}
|
|
||||||
>
|
{/* Modern Metrics Grid */}
|
||||||
Sign out
|
<div className="grid grid-cols-1 sm:grid-cols-2 lg:grid-cols-4 gap-6">
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<div className="grid grid-cols-1 sm:grid-cols-2 lg:grid-cols-4 gap-4">
|
|
||||||
<MetricCard
|
<MetricCard
|
||||||
title="Total Sessions"
|
title="Total Sessions"
|
||||||
value={metrics.totalSessions}
|
value={metrics.totalSessions?.toLocaleString()}
|
||||||
icon={
|
icon={<MessageSquare className="h-5 w-5" />}
|
||||||
<svg
|
|
||||||
className="h-5 w-5"
|
|
||||||
xmlns="http://www.w3.org/2000/svg"
|
|
||||||
fill="none"
|
|
||||||
viewBox="0 0 24 24"
|
|
||||||
stroke="currentColor"
|
|
||||||
strokeWidth="1"
|
|
||||||
>
|
|
||||||
<path
|
|
||||||
strokeLinecap="round"
|
|
||||||
strokeLinejoin="round"
|
|
||||||
d="M7 20l4-16m2 16l4-16M6 9h14M4 15h14"
|
|
||||||
/>
|
|
||||||
</svg>
|
|
||||||
}
|
|
||||||
trend={{
|
trend={{
|
||||||
value: metrics.sessionTrend ?? 0,
|
value: metrics.sessionTrend ?? 0,
|
||||||
isPositive: (metrics.sessionTrend ?? 0) >= 0,
|
isPositive: (metrics.sessionTrend ?? 0) >= 0,
|
||||||
}}
|
}}
|
||||||
|
variant="primary"
|
||||||
/>
|
/>
|
||||||
|
|
||||||
<MetricCard
|
<MetricCard
|
||||||
title="Unique Users"
|
title="Unique Users"
|
||||||
value={metrics.uniqueUsers}
|
value={metrics.uniqueUsers?.toLocaleString()}
|
||||||
icon={
|
icon={<Users className="h-5 w-5" />}
|
||||||
<svg
|
|
||||||
className="h-5 w-5"
|
|
||||||
xmlns="http://www.w3.org/2000/svg"
|
|
||||||
fill="none"
|
|
||||||
viewBox="0 0 24 24"
|
|
||||||
stroke="currentColor"
|
|
||||||
strokeWidth="1"
|
|
||||||
>
|
|
||||||
<path
|
|
||||||
strokeLinecap="round"
|
|
||||||
strokeLinejoin="round"
|
|
||||||
d="M16 7a4 4 0 11-8 0 4 4 0 018 0zM12 14a7 7 0 00-7 7h14a7 7 0 00-7-7z"
|
|
||||||
/>
|
|
||||||
</svg>
|
|
||||||
}
|
|
||||||
trend={{
|
trend={{
|
||||||
value: metrics.usersTrend ?? 0,
|
value: metrics.usersTrend ?? 0,
|
||||||
isPositive: (metrics.usersTrend ?? 0) >= 0,
|
isPositive: (metrics.usersTrend ?? 0) >= 0,
|
||||||
}}
|
}}
|
||||||
|
variant="success"
|
||||||
/>
|
/>
|
||||||
|
|
||||||
<MetricCard
|
<MetricCard
|
||||||
title="Avg. Session Time"
|
title="Avg. Session Time"
|
||||||
value={`${Math.round(metrics.avgSessionLength || 0)}s`}
|
value={`${Math.round(metrics.avgSessionLength || 0)}s`}
|
||||||
icon={
|
icon={<Clock className="h-5 w-5" />}
|
||||||
<svg
|
|
||||||
className="h-5 w-5"
|
|
||||||
xmlns="http://www.w3.org/2000/svg"
|
|
||||||
fill="none"
|
|
||||||
viewBox="0 0 24 24"
|
|
||||||
stroke="currentColor"
|
|
||||||
strokeWidth="1"
|
|
||||||
>
|
|
||||||
<path
|
|
||||||
strokeLinecap="round"
|
|
||||||
strokeLinejoin="round"
|
|
||||||
d="M12 8v4l3 3m6-3a9 9 0 11-18 0 9 9 0 0118 0z"
|
|
||||||
/>
|
|
||||||
</svg>
|
|
||||||
}
|
|
||||||
trend={{
|
trend={{
|
||||||
value: metrics.avgSessionTimeTrend ?? 0,
|
value: metrics.avgSessionTimeTrend ?? 0,
|
||||||
isPositive: (metrics.avgSessionTimeTrend ?? 0) >= 0,
|
isPositive: (metrics.avgSessionTimeTrend ?? 0) >= 0,
|
||||||
}}
|
}}
|
||||||
|
variant="primary"
|
||||||
/>
|
/>
|
||||||
|
|
||||||
<MetricCard
|
<MetricCard
|
||||||
title="Avg. Response Time"
|
title="Avg. Response Time"
|
||||||
value={`${metrics.avgResponseTime?.toFixed(1) || 0}s`}
|
value={`${metrics.avgResponseTime?.toFixed(1) || 0}s`}
|
||||||
icon={
|
icon={<Zap className="h-5 w-5" />}
|
||||||
<svg
|
|
||||||
className="h-5 w-5"
|
|
||||||
xmlns="http://www.w3.org/2000/svg"
|
|
||||||
fill="none"
|
|
||||||
viewBox="0 0 24 24"
|
|
||||||
stroke="currentColor"
|
|
||||||
strokeWidth="1"
|
|
||||||
>
|
|
||||||
<path
|
|
||||||
strokeLinecap="round"
|
|
||||||
strokeLinejoin="round"
|
|
||||||
d="M13 10V3L4 14h7v7l9-11h-7z"
|
|
||||||
/>
|
|
||||||
</svg>
|
|
||||||
}
|
|
||||||
trend={{
|
trend={{
|
||||||
value: metrics.avgResponseTimeTrend ?? 0,
|
value: metrics.avgResponseTimeTrend ?? 0,
|
||||||
isPositive: (metrics.avgResponseTimeTrend ?? 0) <= 0, // Lower response time is better
|
isPositive: (metrics.avgResponseTimeTrend ?? 0) <= 0,
|
||||||
}}
|
}}
|
||||||
|
variant="warning"
|
||||||
|
/>
|
||||||
|
|
||||||
|
<MetricCard
|
||||||
|
title="Daily Costs"
|
||||||
|
value={`€${metrics.avgDailyCosts?.toFixed(4) || '0.0000'}`}
|
||||||
|
icon={<Euro className="h-5 w-5" />}
|
||||||
|
description="Average per day"
|
||||||
|
variant="warning"
|
||||||
|
/>
|
||||||
|
|
||||||
|
<MetricCard
|
||||||
|
title="Peak Usage"
|
||||||
|
value={metrics.peakUsageTime || 'N/A'}
|
||||||
|
icon={<TrendingUp className="h-5 w-5" />}
|
||||||
|
description="Busiest hour"
|
||||||
|
variant="primary"
|
||||||
|
/>
|
||||||
|
|
||||||
|
<MetricCard
|
||||||
|
title="Resolution Rate"
|
||||||
|
value={`${metrics.resolvedChatsPercentage?.toFixed(1) || '0.0'}%`}
|
||||||
|
icon={<CheckCircle className="h-5 w-5" />}
|
||||||
|
trend={{
|
||||||
|
value: metrics.resolvedChatsPercentage ?? 0,
|
||||||
|
isPositive: (metrics.resolvedChatsPercentage ?? 0) >= 80,
|
||||||
|
}}
|
||||||
|
variant={metrics.resolvedChatsPercentage && metrics.resolvedChatsPercentage >= 80 ? "success" : "warning"}
|
||||||
|
/>
|
||||||
|
|
||||||
|
<MetricCard
|
||||||
|
title="Active Languages"
|
||||||
|
value={Object.keys(metrics.languages || {}).length}
|
||||||
|
icon={<Globe className="h-5 w-5" />}
|
||||||
|
description="Languages detected"
|
||||||
|
variant="success"
|
||||||
/>
|
/>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
|
{/* Charts Section */}
|
||||||
<div className="grid grid-cols-1 lg:grid-cols-3 gap-6">
|
<div className="grid grid-cols-1 lg:grid-cols-3 gap-6">
|
||||||
<div className="bg-white p-6 rounded-xl shadow lg:col-span-2">
|
<ModernLineChart
|
||||||
<h3 className="font-bold text-lg text-gray-800 mb-4">
|
data={getSessionsOverTimeData()}
|
||||||
Sessions Over Time
|
title="Sessions Over Time"
|
||||||
</h3>
|
className="lg:col-span-2"
|
||||||
<SessionsLineChart sessionsPerDay={metrics.days} />
|
height={350}
|
||||||
</div>
|
/>
|
||||||
<div className="bg-white p-6 rounded-xl shadow">
|
|
||||||
<h3 className="font-bold text-lg text-gray-800 mb-4">
|
<ModernDonutChart
|
||||||
Conversation Sentiment
|
data={getSentimentData()}
|
||||||
</h3>
|
title="Conversation Sentiment"
|
||||||
<DonutChart
|
centerText={{
|
||||||
data={{
|
title: "Total",
|
||||||
labels: ["Positive", "Neutral", "Negative"],
|
value: metrics.totalSessions || 0,
|
||||||
values: [
|
}}
|
||||||
getSentimentData().positive,
|
height={350}
|
||||||
getSentimentData().neutral,
|
|
||||||
getSentimentData().negative,
|
|
||||||
],
|
|
||||||
colors: ["#1cad7c", "#a1a1a1", "#dc2626"],
|
|
||||||
}}
|
|
||||||
centerText={{
|
|
||||||
title: "Total",
|
|
||||||
value: metrics.totalSessions,
|
|
||||||
}}
|
|
||||||
/>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div className="grid grid-cols-1 lg:grid-cols-2 gap-6">
|
|
||||||
<div className="bg-white p-6 rounded-xl shadow">
|
|
||||||
<h3 className="font-bold text-lg text-gray-800 mb-4">
|
|
||||||
Sessions by Category
|
|
||||||
</h3>
|
|
||||||
<CategoriesBarChart categories={metrics.categories || {}} />
|
|
||||||
</div>
|
|
||||||
<div className="bg-white p-6 rounded-xl shadow">
|
|
||||||
<h3 className="font-bold text-lg text-gray-800 mb-4">
|
|
||||||
Languages Used
|
|
||||||
</h3>
|
|
||||||
<LanguagePieChart languages={metrics.languages || {}} />
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div className="grid grid-cols-1 lg:grid-cols-2 gap-6">
|
|
||||||
<div className="bg-white p-6 rounded-xl shadow">
|
|
||||||
<h3 className="font-bold text-lg text-gray-800 mb-4">
|
|
||||||
Geographic Distribution
|
|
||||||
</h3>
|
|
||||||
<GeographicMap countries={getCountryData()} />
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div className="bg-white p-6 rounded-xl shadow">
|
|
||||||
<h3 className="font-bold text-lg text-gray-800 mb-4">
|
|
||||||
Common Topics
|
|
||||||
</h3>
|
|
||||||
<div className="h-[300px]">
|
|
||||||
<WordCloud words={getWordCloudData()} width={500} height={400} />
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div className="bg-white p-6 rounded-xl shadow">
|
|
||||||
<h3 className="font-bold text-lg text-gray-800 mb-4">
|
|
||||||
Response Time Distribution
|
|
||||||
</h3>
|
|
||||||
<ResponseTimeDistribution
|
|
||||||
data={getResponseTimeData()}
|
|
||||||
average={metrics.avgResponseTime || 0}
|
|
||||||
/>
|
/>
|
||||||
</div>
|
</div>
|
||||||
<div className="bg-white p-6 rounded-xl shadow">
|
|
||||||
<div className="flex flex-col sm:flex-row justify-between items-start sm:items-center gap-3 mb-4">
|
<div className="grid grid-cols-1 lg:grid-cols-2 gap-6">
|
||||||
<h3 className="font-bold text-lg text-gray-800">
|
<ModernBarChart
|
||||||
Token Usage & Costs
|
data={getCategoriesData()}
|
||||||
</h3>
|
title="Sessions by Category"
|
||||||
<div className="flex flex-col sm:flex-row gap-2 sm:gap-4 w-full sm:w-auto">
|
height={350}
|
||||||
<div className="text-sm bg-blue-50 text-blue-700 px-3 py-1 rounded-full flex items-center">
|
/>
|
||||||
<span className="font-semibold mr-1">Total Tokens:</span>
|
|
||||||
{metrics.totalTokens?.toLocaleString() || 0}
|
<ModernDonutChart
|
||||||
|
data={getLanguagesData()}
|
||||||
|
title="Languages Used"
|
||||||
|
height={350}
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Geographic and Topics Section */}
|
||||||
|
<div className="grid grid-cols-1 lg:grid-cols-2 gap-6">
|
||||||
|
<Card>
|
||||||
|
<CardHeader>
|
||||||
|
<CardTitle className="flex items-center gap-2">
|
||||||
|
<Globe className="h-5 w-5" />
|
||||||
|
Geographic Distribution
|
||||||
|
</CardTitle>
|
||||||
|
</CardHeader>
|
||||||
|
<CardContent>
|
||||||
|
<GeographicMap countries={getCountryData()} />
|
||||||
|
</CardContent>
|
||||||
|
</Card>
|
||||||
|
|
||||||
|
<Card>
|
||||||
|
<CardHeader>
|
||||||
|
<CardTitle className="flex items-center gap-2">
|
||||||
|
<MessageCircle className="h-5 w-5" />
|
||||||
|
Common Topics
|
||||||
|
</CardTitle>
|
||||||
|
</CardHeader>
|
||||||
|
<CardContent>
|
||||||
|
<div className="h-[300px]">
|
||||||
|
<WordCloud words={getWordCloudData()} width={500} height={300} />
|
||||||
</div>
|
</div>
|
||||||
<div className="text-sm bg-green-50 text-green-700 px-3 py-1 rounded-full flex items-center">
|
</CardContent>
|
||||||
<span className="font-semibold mr-1">Total Cost:</span>€
|
</Card>
|
||||||
{metrics.totalTokensEur?.toFixed(4) || 0}
|
</div>
|
||||||
|
|
||||||
|
{/* Top Questions Chart */}
|
||||||
|
<TopQuestionsChart data={metrics.topQuestions || []} />
|
||||||
|
|
||||||
|
{/* Response Time Distribution */}
|
||||||
|
<Card>
|
||||||
|
<CardHeader>
|
||||||
|
<CardTitle>Response Time Distribution</CardTitle>
|
||||||
|
</CardHeader>
|
||||||
|
<CardContent>
|
||||||
|
<ResponseTimeDistribution
|
||||||
|
data={getResponseTimeData()}
|
||||||
|
average={metrics.avgResponseTime || 0}
|
||||||
|
/>
|
||||||
|
</CardContent>
|
||||||
|
</Card>
|
||||||
|
|
||||||
|
{/* Token Usage Summary */}
|
||||||
|
<Card>
|
||||||
|
<CardHeader>
|
||||||
|
<div className="flex flex-col sm:flex-row justify-between items-start sm:items-center gap-4">
|
||||||
|
<CardTitle>AI Usage & Costs</CardTitle>
|
||||||
|
<div className="flex flex-wrap gap-2">
|
||||||
|
<Badge variant="outline" className="gap-1">
|
||||||
|
<span className="font-semibold">Total Tokens:</span>
|
||||||
|
{metrics.totalTokens?.toLocaleString() || 0}
|
||||||
|
</Badge>
|
||||||
|
<Badge variant="outline" className="gap-1">
|
||||||
|
<span className="font-semibold">Total Cost:</span>
|
||||||
|
€{metrics.totalTokensEur?.toFixed(4) || 0}
|
||||||
|
</Badge>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</CardHeader>
|
||||||
<TokenUsageChart tokenData={getTokenData()} />
|
<CardContent>
|
||||||
</div>
|
<div className="text-center py-8 text-muted-foreground">
|
||||||
|
<p>Token usage chart will be implemented with historical data</p>
|
||||||
|
</div>
|
||||||
|
</CardContent>
|
||||||
|
</Card>
|
||||||
</div>
|
</div>
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Our exported component
|
|
||||||
export default function DashboardPage() {
|
export default function DashboardPage() {
|
||||||
return <DashboardContent />;
|
return <DashboardContent />;
|
||||||
}
|
}
|
||||||
|
|||||||
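The overview page above passes title, value, icon, trend, description, variant, and isLoading into the new metric-card component. A minimal sketch of the props interface that usage implies, inferred from the call sites rather than taken from the actual components/ui/metric-card file:

import type { ReactNode } from "react";

// Hypothetical props interface inferred from how <MetricCard> is used above;
// the real components/ui/metric-card definition may differ.
interface MetricCardProps {
  title: string;
  value?: string | number;          // e.g. metrics.totalSessions?.toLocaleString()
  icon?: ReactNode;                 // e.g. <MessageSquare className="h-5 w-5" />
  trend?: { value: number; isPositive: boolean };
  description?: string;             // e.g. "Average per day"
  variant?: "default" | "primary" | "success" | "warning";
  isLoading?: boolean;              // used by the skeleton grid while data loads
}

The variant names are simply the ones that appear in the diff; any of these fields could differ in the real component.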
@ -4,6 +4,19 @@ import { useSession } from "next-auth/react";
|
|||||||
import { useRouter } from "next/navigation";
|
import { useRouter } from "next/navigation";
|
||||||
import { useEffect, useState } from "react";
|
import { useEffect, useState } from "react";
|
||||||
import { FC } from "react";
|
import { FC } from "react";
|
||||||
|
import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card";
|
||||||
|
import { Button } from "@/components/ui/button";
|
||||||
|
import { Badge } from "@/components/ui/badge";
|
||||||
|
import {
|
||||||
|
BarChart3,
|
||||||
|
MessageSquare,
|
||||||
|
Settings,
|
||||||
|
Users,
|
||||||
|
ArrowRight,
|
||||||
|
TrendingUp,
|
||||||
|
Shield,
|
||||||
|
Zap,
|
||||||
|
} from "lucide-react";
|
||||||
|
|
||||||
const DashboardPage: FC = () => {
|
const DashboardPage: FC = () => {
|
||||||
const { data: session, status } = useSession();
|
const { data: session, status } = useSession();
|
||||||
@ -21,82 +34,223 @@ const DashboardPage: FC = () => {
|
|||||||
|
|
||||||
if (loading) {
|
if (loading) {
|
||||||
return (
|
return (
|
||||||
<div className="flex items-center justify-center min-h-[40vh]">
|
<div className="flex items-center justify-center min-h-[60vh]">
|
||||||
<div className="text-center">
|
<div className="text-center space-y-4">
|
||||||
<div className="animate-spin rounded-full h-12 w-12 border-t-2 border-b-2 border-sky-500 mx-auto mb-4"></div>
|
<div className="animate-spin rounded-full h-12 w-12 border-b-2 border-primary mx-auto"></div>
|
||||||
<p className="text-lg text-gray-600">Loading dashboard...</p>
|
<p className="text-lg text-muted-foreground">Loading dashboard...</p>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const navigationCards = [
|
||||||
|
{
|
||||||
|
title: "Analytics Overview",
|
||||||
|
description: "View comprehensive metrics, charts, and insights from your chat sessions",
|
||||||
|
icon: <BarChart3 className="h-6 w-6" />,
|
||||||
|
href: "/dashboard/overview",
|
||||||
|
variant: "primary" as const,
|
||||||
|
features: ["Real-time metrics", "Interactive charts", "Trend analysis"],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
title: "Session Browser",
|
||||||
|
description: "Browse, search, and analyze individual conversation sessions",
|
||||||
|
icon: <MessageSquare className="h-6 w-6" />,
|
||||||
|
href: "/dashboard/sessions",
|
||||||
|
variant: "success" as const,
|
||||||
|
features: ["Session search", "Conversation details", "Export data"],
|
||||||
|
},
|
||||||
|
...(session?.user?.role === "ADMIN"
|
||||||
|
? [
|
||||||
|
{
|
||||||
|
title: "Company Settings",
|
||||||
|
description: "Configure company settings, integrations, and API connections",
|
||||||
|
icon: <Settings className="h-6 w-6" />,
|
||||||
|
href: "/dashboard/company",
|
||||||
|
variant: "warning" as const,
|
||||||
|
features: ["API configuration", "Integration settings", "Data management"],
|
||||||
|
adminOnly: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
title: "User Management",
|
||||||
|
description: "Invite team members and manage user accounts and permissions",
|
||||||
|
icon: <Users className="h-6 w-6" />,
|
||||||
|
href: "/dashboard/users",
|
||||||
|
variant: "default" as const,
|
||||||
|
features: ["User invitations", "Role management", "Access control"],
|
||||||
|
adminOnly: true,
|
||||||
|
},
|
||||||
|
]
|
||||||
|
: []),
|
||||||
|
];
|
||||||
|
|
||||||
|
const getCardClasses = (variant: string) => {
|
||||||
|
switch (variant) {
|
||||||
|
case "primary":
|
||||||
|
return "border-primary/20 bg-linear-to-br from-primary/5 to-primary/10 hover:from-primary/10 hover:to-primary/15";
|
||||||
|
case "success":
|
||||||
|
return "border-green-200 bg-linear-to-br from-green-50 to-green-100 hover:from-green-100 hover:to-green-150 dark:border-green-800 dark:from-green-950 dark:to-green-900";
|
||||||
|
case "warning":
|
||||||
|
return "border-amber-200 bg-linear-to-br from-amber-50 to-amber-100 hover:from-amber-100 hover:to-amber-150 dark:border-amber-800 dark:from-amber-950 dark:to-amber-900";
|
||||||
|
default:
|
||||||
|
return "border-border bg-linear-to-br from-card to-muted/20 hover:from-muted/30 hover:to-muted/40";
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const getIconClasses = (variant: string) => {
|
||||||
|
switch (variant) {
|
||||||
|
case "primary":
|
||||||
|
return "bg-primary/10 text-primary border-primary/20";
|
||||||
|
case "success":
|
||||||
|
return "bg-green-100 text-green-600 border-green-200 dark:bg-green-900 dark:text-green-400 dark:border-green-800";
|
||||||
|
case "warning":
|
||||||
|
return "bg-amber-100 text-amber-600 border-amber-200 dark:bg-amber-900 dark:text-amber-400 dark:border-amber-800";
|
||||||
|
default:
|
||||||
|
return "bg-muted text-muted-foreground border-border";
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<div className="space-y-6">
|
<div className="space-y-8">
|
||||||
<div className="bg-white rounded-xl shadow p-6">
|
{/* Welcome Header */}
|
||||||
<h1 className="text-2xl font-bold mb-4">Dashboard</h1>
|
<Card className="border-0 bg-linear-to-r from-primary/5 via-primary/10 to-primary/5">
|
||||||
|
<CardHeader>
|
||||||
<div className="grid sm:grid-cols-2 lg:grid-cols-3 gap-6">
|
<div className="flex flex-col sm:flex-row justify-between items-start sm:items-center gap-4">
|
||||||
<div className="bg-gradient-to-br from-sky-50 to-sky-100 p-6 rounded-xl shadow-sm hover:shadow-md transition-shadow">
|
<div className="space-y-2">
|
||||||
<h2 className="text-lg font-semibold text-sky-700">Analytics</h2>
|
<div className="flex items-center gap-3">
|
||||||
<p className="text-gray-600 mt-2 mb-4">
|
<h1 className="text-3xl font-bold tracking-tight">
|
||||||
View your chat session metrics and analytics
|
Welcome back, {session?.user?.name || "User"}!
|
||||||
</p>
|
</h1>
|
||||||
<button
|
<Badge variant="secondary" className="text-xs">
|
||||||
onClick={() => router.push("/dashboard/overview")}
|
{session?.user?.role}
|
||||||
className="bg-sky-500 hover:bg-sky-600 text-white px-4 py-2 rounded-lg text-sm font-medium transition-colors"
|
</Badge>
|
||||||
>
|
</div>
|
||||||
View Analytics
|
<p className="text-muted-foreground">
|
||||||
</button>
|
Choose a section below to explore your analytics dashboard
|
||||||
</div>
|
|
||||||
|
|
||||||
<div className="bg-gradient-to-br from-emerald-50 to-emerald-100 p-6 rounded-xl shadow-sm hover:shadow-md transition-shadow">
|
|
||||||
<h2 className="text-lg font-semibold text-emerald-700">Sessions</h2>
|
|
||||||
<p className="text-gray-600 mt-2 mb-4">
|
|
||||||
Browse and analyze conversation sessions
|
|
||||||
</p>
|
|
||||||
<button
|
|
||||||
onClick={() => router.push("/dashboard/sessions")}
|
|
||||||
className="bg-emerald-500 hover:bg-emerald-600 text-white px-4 py-2 rounded-lg text-sm font-medium transition-colors"
|
|
||||||
>
|
|
||||||
View Sessions
|
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
{session?.user?.role === "admin" && (
|
|
||||||
<div className="bg-gradient-to-br from-purple-50 to-purple-100 p-6 rounded-xl shadow-sm hover:shadow-md transition-shadow">
|
|
||||||
<h2 className="text-lg font-semibold text-purple-700">
|
|
||||||
Company Settings
|
|
||||||
</h2>
|
|
||||||
<p className="text-gray-600 mt-2 mb-4">
|
|
||||||
Configure company settings and integrations
|
|
||||||
</p>
|
</p>
|
||||||
<button
|
|
||||||
onClick={() => router.push("/dashboard/company")}
|
|
||||||
className="bg-purple-500 hover:bg-purple-600 text-white px-4 py-2 rounded-lg text-sm font-medium transition-colors"
|
|
||||||
>
|
|
||||||
Manage Settings
|
|
||||||
</button>
|
|
||||||
</div>
|
</div>
|
||||||
)}
|
|
||||||
|
<div className="flex items-center gap-2">
|
||||||
|
<div className="flex items-center gap-1 text-sm text-muted-foreground">
|
||||||
|
<Shield className="h-4 w-4" />
|
||||||
|
Secure Dashboard
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</CardHeader>
|
||||||
|
</Card>
|
||||||
|
|
||||||
{session?.user?.role === "admin" && (
|
{/* Navigation Cards */}
|
||||||
<div className="bg-gradient-to-br from-amber-50 to-amber-100 p-6 rounded-xl shadow-sm hover:shadow-md transition-shadow">
|
<div className="grid grid-cols-1 lg:grid-cols-2 gap-6">
|
||||||
<h2 className="text-lg font-semibold text-amber-700">
|
{navigationCards.map((card, index) => (
|
||||||
User Management
|
<Card
|
||||||
</h2>
|
key={index}
|
||||||
<p className="text-gray-600 mt-2 mb-4">
|
className={`relative overflow-hidden transition-all duration-200 hover:shadow-lg hover:-translate-y-0.5 cursor-pointer ${getCardClasses(
|
||||||
Invite and manage user accounts
|
card.variant
|
||||||
</p>
|
)}`}
|
||||||
<button
|
onClick={() => router.push(card.href)}
|
||||||
onClick={() => router.push("/dashboard/users")}
|
>
|
||||||
className="bg-amber-500 hover:bg-amber-600 text-white px-4 py-2 rounded-lg text-sm font-medium transition-colors"
|
{/* Subtle gradient overlay */}
|
||||||
|
<div className="absolute inset-0 bg-linear-to-br from-white/50 to-transparent dark:from-white/5 pointer-events-none" />
|
||||||
|
|
||||||
|
<CardHeader className="relative">
|
||||||
|
<div className="flex items-start justify-between">
|
||||||
|
<div className="space-y-3">
|
||||||
|
<div className="flex items-center gap-3">
|
||||||
|
<div
|
||||||
|
className={`flex h-12 w-12 shrink-0 items-center justify-center rounded-full border transition-colors ${getIconClasses(
|
||||||
|
card.variant
|
||||||
|
)}`}
|
||||||
|
>
|
||||||
|
{card.icon}
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<CardTitle className="text-xl font-semibold flex items-center gap-2">
|
||||||
|
{card.title}
|
||||||
|
{card.adminOnly && (
|
||||||
|
<Badge variant="outline" className="text-xs">
|
||||||
|
Admin
|
||||||
|
</Badge>
|
||||||
|
)}
|
||||||
|
</CardTitle>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<p className="text-muted-foreground leading-relaxed">
|
||||||
|
{card.description}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</CardHeader>
|
||||||
|
|
||||||
|
<CardContent className="relative space-y-4">
|
||||||
|
{/* Features List */}
|
||||||
|
<div className="space-y-2">
|
||||||
|
{card.features.map((feature, featureIndex) => (
|
||||||
|
<div key={featureIndex} className="flex items-center gap-2 text-sm">
|
||||||
|
<div className="h-1.5 w-1.5 rounded-full bg-current opacity-60" />
|
||||||
|
<span className="text-muted-foreground">{feature}</span>
|
||||||
|
</div>
|
||||||
|
))}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Action Button */}
|
||||||
|
<Button
|
||||||
|
className="w-full gap-2 mt-4"
|
||||||
|
variant={card.variant === "primary" ? "default" : "outline"}
|
||||||
|
onClick={(e) => {
|
||||||
|
e.stopPropagation();
|
||||||
|
router.push(card.href);
|
||||||
|
}}
|
||||||
>
|
>
|
||||||
Manage Users
|
<span>
|
||||||
</button>
|
{card.title === "Analytics Overview" && "View Analytics"}
|
||||||
</div>
|
{card.title === "Session Browser" && "Browse Sessions"}
|
||||||
)}
|
{card.title === "Company Settings" && "Manage Settings"}
|
||||||
</div>
|
{card.title === "User Management" && "Manage Users"}
|
||||||
|
</span>
|
||||||
|
<ArrowRight className="h-4 w-4" />
|
||||||
|
</Button>
|
||||||
|
</CardContent>
|
||||||
|
</Card>
|
||||||
|
))}
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
|
{/* Quick Stats */}
|
||||||
|
<Card>
|
||||||
|
<CardHeader>
|
||||||
|
<CardTitle className="flex items-center gap-2">
|
||||||
|
<TrendingUp className="h-5 w-5" />
|
||||||
|
Quick Stats
|
||||||
|
</CardTitle>
|
||||||
|
</CardHeader>
|
||||||
|
<CardContent>
|
||||||
|
<div className="grid grid-cols-1 sm:grid-cols-3 gap-6">
|
||||||
|
<div className="text-center space-y-2">
|
||||||
|
<div className="flex items-center justify-center gap-2">
|
||||||
|
<Zap className="h-5 w-5 text-primary" />
|
||||||
|
<span className="text-2xl font-bold">Real-time</span>
|
||||||
|
</div>
|
||||||
|
<p className="text-sm text-muted-foreground">Data updates</p>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="text-center space-y-2">
|
||||||
|
<div className="flex items-center justify-center gap-2">
|
||||||
|
<Shield className="h-5 w-5 text-green-600" />
|
||||||
|
<span className="text-2xl font-bold">Secure</span>
|
||||||
|
</div>
|
||||||
|
<p className="text-sm text-muted-foreground">Data protection</p>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="text-center space-y-2">
|
||||||
|
<div className="flex items-center justify-center gap-2">
|
||||||
|
<BarChart3 className="h-5 w-5 text-blue-600" />
|
||||||
|
<span className="text-2xl font-bold">Advanced</span>
|
||||||
|
</div>
|
||||||
|
<p className="text-sm text-muted-foreground">Analytics</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</CardContent>
|
||||||
|
</Card>
|
||||||
</div>
|
</div>
|
||||||
);
|
);
|
||||||
};
|
};
|
||||||
|
|||||||
@ -5,6 +5,7 @@ import { useParams, useRouter } from "next/navigation"; // Import useRouter
import { useSession } from "next-auth/react"; // Import useSession
import SessionDetails from "../../../../components/SessionDetails";
import TranscriptViewer from "../../../../components/TranscriptViewer";
+import MessageViewer from "../../../../components/MessageViewer";
import { ChatSession } from "../../../../lib/types";
import Link from "next/link";

@ -107,7 +108,7 @@ export default function SessionViewPage() {
  }

  return (
-    <div className="min-h-screen bg-gradient-to-br from-slate-50 to-sky-100 p-4 md:p-6">
+    <div className="min-h-screen bg-linear-to-br from-slate-50 to-sky-100 p-4 md:p-6">
      <div className="max-w-4xl mx-auto">
        <div className="mb-6">
          <Link
@ -136,30 +137,26 @@ export default function SessionViewPage() {
        <div>
          <SessionDetails session={session} />
        </div>
-        {session.transcriptContent &&
-        session.transcriptContent.trim() !== "" ? (
-          <div className="mt-0">
-            <TranscriptViewer
-              transcriptContent={session.transcriptContent}
-              transcriptUrl={session.fullTranscriptUrl}
-            />
-          </div>
-        ) : (
+        {/* Show parsed messages if available */}
+        {session.messages && session.messages.length > 0 && (
+          <div>
+            <MessageViewer messages={session.messages} />
+          </div>
+        )}
+
+        {/* Show transcript URL if available */}
+        {session.fullTranscriptUrl && (
          <div className="bg-white p-4 rounded-lg shadow">
-            <h3 className="font-bold text-lg mb-3">Transcript</h3>
-            <p className="text-gray-600">
-              No transcript content available for this session.
-            </p>
-            {session.fullTranscriptUrl && (
-              <a
-                href={session.fullTranscriptUrl}
-                target="_blank"
-                rel="noopener noreferrer"
-                className="text-sky-600 hover:underline mt-2 inline-block"
-              >
-                View Source Transcript URL
-              </a>
-            )}
+            <h3 className="font-bold text-lg mb-3">Source Transcript</h3>
+            <a
+              href={session.fullTranscriptUrl}
+              target="_blank"
+              rel="noopener noreferrer"
+              className="text-sky-600 hover:underline"
+            >
+              View Original Transcript
+            </a>
          </div>
        )}
      </div>
|
|||||||
@ -37,7 +37,7 @@ export default function DashboardSettings({
    else setMessage("Failed.");
  }

-  if (session.user.role !== "admin") return null;
+  if (session.user.role !== "ADMIN") return null;

  return (
    <div className="bg-white p-6 rounded-xl shadow mb-6">
@ -34,7 +34,7 @@ export default function UserManagement({ session }: UserManagementProps) {
    else setMsg("Failed.");
  }

-  if (session.user.role !== "admin") return null;
+  if (session.user.role !== "ADMIN") return null;

  return (
    <div className="bg-white p-6 rounded-xl shadow mb-6">
@ -52,8 +52,8 @@ export default function UserManagement({ session }: UserManagementProps) {
          onChange={(e) => setRole(e.target.value)}
        >
          <option value="user">User</option>
-          <option value="admin">Admin</option>
-          <option value="auditor">Auditor</option>
+          <option value="ADMIN">Admin</option>
+          <option value="AUDITOR">Auditor</option>
        </select>
        <button
          className="bg-blue-600 text-white rounded px-4 py-2 sm:py-0 w-full sm:w-auto"
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Check for admin access
|
// Check for admin access
|
||||||
if (session?.user?.role !== "admin") {
|
if (session?.user?.role !== "ADMIN") {
|
||||||
return (
|
return (
|
||||||
<div className="text-center py-10 bg-white rounded-xl shadow p-6">
|
<div className="text-center py-10 bg-white rounded-xl shadow p-6">
|
||||||
<h2 className="font-bold text-xl text-red-600 mb-2">Access Denied</h2>
|
<h2 className="font-bold text-xl text-red-600 mb-2">Access Denied</h2>
|
||||||
@ -124,8 +124,8 @@ export default function UserManagementPage() {
|
|||||||
onChange={(e) => setRole(e.target.value)}
|
onChange={(e) => setRole(e.target.value)}
|
||||||
>
|
>
|
||||||
<option value="user">User</option>
|
<option value="user">User</option>
|
||||||
<option value="admin">Admin</option>
|
<option value="ADMIN">Admin</option>
|
||||||
<option value="auditor">Auditor</option>
|
<option value="AUDITOR">Auditor</option>
|
||||||
</select>
|
</select>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
@ -183,9 +183,9 @@ export default function UserManagementPage() {
|
|||||||
<td className="px-6 py-4 whitespace-nowrap text-sm text-gray-500">
|
<td className="px-6 py-4 whitespace-nowrap text-sm text-gray-500">
|
||||||
<span
|
<span
|
||||||
className={`px-2 inline-flex text-xs leading-5 font-semibold rounded-full ${
|
className={`px-2 inline-flex text-xs leading-5 font-semibold rounded-full ${
|
||||||
user.role === "admin"
|
user.role === "ADMIN"
|
||||||
? "bg-purple-100 text-purple-800"
|
? "bg-purple-100 text-purple-800"
|
||||||
: user.role === "auditor"
|
: user.role === "AUDITOR"
|
||||||
? "bg-blue-100 text-blue-800"
|
? "bg-blue-100 text-blue-800"
|
||||||
: "bg-green-100 text-green-800"
|
: "bg-green-100 text-green-800"
|
||||||
}`}
|
}`}
|
||||||
|
|||||||
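The hunks above migrate the role comparisons from lowercase strings ("admin", "auditor") to uppercase ("ADMIN", "AUDITOR"), which suggests the user model now stores uppercase role values. A minimal sketch of the role union those checks imply, assuming an uppercase "USER" member as well (note that the <option value="user"> above still submits lowercase, so treat the exact set of values as an assumption):

// Hypothetical role union inferred from the uppercase comparisons in this diff;
// the real schema (likely a Prisma enum) may differ.
export type Role = "ADMIN" | "USER" | "AUDITOR";

// Case-sensitive check, matching the `session.user.role !== "ADMIN"` guards above.
export const isAdmin = (role?: string): boolean => role === "ADMIN";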
138  app/globals.css
@ -1 +1,139 @@
|
|||||||
@import "tailwindcss";
|
@import "tailwindcss";
|
||||||
|
@import "tw-animate-css";
|
||||||
|
|
||||||
|
@custom-variant dark (&:is(.dark *));
|
||||||
|
|
||||||
|
@theme inline {
|
||||||
|
--radius-sm: calc(var(--radius) - 4px);
|
||||||
|
--radius-md: calc(var(--radius) - 2px);
|
||||||
|
--radius-lg: var(--radius);
|
||||||
|
--radius-xl: calc(var(--radius) + 4px);
|
||||||
|
--color-background: var(--background);
|
||||||
|
--color-foreground: var(--foreground);
|
||||||
|
--color-card: var(--card);
|
||||||
|
--color-card-foreground: var(--card-foreground);
|
||||||
|
--color-popover: var(--popover);
|
||||||
|
--color-popover-foreground: var(--popover-foreground);
|
||||||
|
--color-primary: var(--primary);
|
||||||
|
--color-primary-foreground: var(--primary-foreground);
|
||||||
|
--color-secondary: var(--secondary);
|
||||||
|
--color-secondary-foreground: var(--secondary-foreground);
|
||||||
|
--color-muted: var(--muted);
|
||||||
|
--color-muted-foreground: var(--muted-foreground);
|
||||||
|
--color-accent: var(--accent);
|
||||||
|
--color-accent-foreground: var(--accent-foreground);
|
||||||
|
--color-destructive: var(--destructive);
|
||||||
|
--color-border: var(--border);
|
||||||
|
--color-input: var(--input);
|
||||||
|
--color-ring: var(--ring);
|
||||||
|
--color-chart-1: var(--chart-1);
|
||||||
|
--color-chart-2: var(--chart-2);
|
||||||
|
--color-chart-3: var(--chart-3);
|
||||||
|
--color-chart-4: var(--chart-4);
|
||||||
|
--color-chart-5: var(--chart-5);
|
||||||
|
--color-sidebar: var(--sidebar);
|
||||||
|
--color-sidebar-foreground: var(--sidebar-foreground);
|
||||||
|
--color-sidebar-primary: var(--sidebar-primary);
|
||||||
|
--color-sidebar-primary-foreground: var(--sidebar-primary-foreground);
|
||||||
|
--color-sidebar-accent: var(--sidebar-accent);
|
||||||
|
--color-sidebar-accent-foreground: var(--sidebar-accent-foreground);
|
||||||
|
--color-sidebar-border: var(--sidebar-border);
|
||||||
|
--color-sidebar-ring: var(--sidebar-ring);
|
||||||
|
}
|
||||||
|
|
||||||
|
:root {
|
||||||
|
--radius: 0.625rem;
|
||||||
|
--background: 255 255 255;
|
||||||
|
--foreground: 15 23 42;
|
||||||
|
--card: 255 255 255;
|
||||||
|
--card-foreground: 15 23 42;
|
||||||
|
--popover: 255 255 255;
|
||||||
|
--popover-foreground: 15 23 42;
|
||||||
|
--primary: 0 123 255;
|
||||||
|
--primary-foreground: 255 255 255;
|
||||||
|
--secondary: 245 245 245;
|
||||||
|
--secondary-foreground: 51 51 51;
|
||||||
|
--muted: 248 250 252;
|
||||||
|
--muted-foreground: 100 116 139;
|
||||||
|
--accent: 245 245 245;
|
||||||
|
--accent-foreground: 51 51 51;
|
||||||
|
--destructive: 239 68 68;
|
||||||
|
--border: 229 231 235;
|
||||||
|
--input: 229 231 235;
|
||||||
|
--ring: 0 123 255;
|
||||||
|
--chart-1: 0 123 255;
|
||||||
|
--chart-2: 255 20 147;
|
||||||
|
--chart-3: 50 205 50;
|
||||||
|
--chart-4: 138 43 226;
|
||||||
|
--chart-5: 255 215 0;
|
||||||
|
--sidebar: 248 250 252;
|
||||||
|
--sidebar-foreground: 15 23 42;
|
||||||
|
--sidebar-primary: 0 123 255;
|
||||||
|
--sidebar-primary-foreground: 255 255 255;
|
||||||
|
--sidebar-accent: 245 245 245;
|
||||||
|
--sidebar-accent-foreground: 51 51 51;
|
||||||
|
--sidebar-border: 229 231 235;
|
||||||
|
--sidebar-ring: 0 123 255;
|
||||||
|
}
|
||||||
|
|
||||||
|
.dark {
|
||||||
|
--background: 15 23 42;
|
||||||
|
--foreground: 248 250 252;
|
||||||
|
--card: 30 41 59;
|
||||||
|
--card-foreground: 248 250 252;
|
||||||
|
--popover: 30 41 59;
|
||||||
|
--popover-foreground: 248 250 252;
|
||||||
|
--primary: 59 130 246;
|
||||||
|
--primary-foreground: 15 23 42;
|
||||||
|
--secondary: 51 65 85;
|
||||||
|
--secondary-foreground: 248 250 252;
|
||||||
|
--muted: 51 65 85;
|
||||||
|
--muted-foreground: 148 163 184;
|
||||||
|
--accent: 51 65 85;
|
||||||
|
--accent-foreground: 248 250 252;
|
||||||
|
--destructive: 248 113 113;
|
||||||
|
--border: 51 65 85;
|
||||||
|
--input: 51 65 85;
|
||||||
|
--ring: 59 130 246;
|
||||||
|
--chart-1: 59 130 246;
|
||||||
|
--chart-2: 236 72 153;
|
||||||
|
--chart-3: 34 197 94;
|
||||||
|
--chart-4: 147 51 234;
|
||||||
|
--chart-5: 251 191 36;
|
||||||
|
--sidebar: 30 41 59;
|
||||||
|
--sidebar-foreground: 248 250 252;
|
||||||
|
--sidebar-primary: 59 130 246;
|
||||||
|
--sidebar-primary-foreground: 248 250 252;
|
||||||
|
--sidebar-accent: 51 65 85;
|
||||||
|
--sidebar-accent-foreground: 248 250 252;
|
||||||
|
--sidebar-border: 51 65 85;
|
||||||
|
--sidebar-ring: 59 130 246;
|
||||||
|
}
|
||||||
|
|
||||||
|
@layer base {
|
||||||
|
* {
|
||||||
|
@apply border-border outline-ring/50;
|
||||||
|
}
|
||||||
|
body {
|
||||||
|
@apply bg-gray-50 text-gray-900;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Apple-style scrollbars */
|
||||||
|
::-webkit-scrollbar {
|
||||||
|
width: 8px;
|
||||||
|
height: 8px;
|
||||||
|
}
|
||||||
|
|
||||||
|
::-webkit-scrollbar-track {
|
||||||
|
background: transparent;
|
||||||
|
}
|
||||||
|
|
||||||
|
::-webkit-scrollbar-thumb {
|
||||||
|
background: rgba(0, 0, 0, 0.2);
|
||||||
|
border-radius: 4px;
|
||||||
|
}
|
||||||
|
|
||||||
|
::-webkit-scrollbar-thumb:hover {
|
||||||
|
background: rgba(0, 0, 0, 0.3);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|||||||
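In the app/globals.css changes above, the @theme inline block maps each CSS custom property onto a Tailwind color token, so utilities such as bg-card, border-border, or text-muted-foreground resolve to the :root and .dark values. A hypothetical usage sketch (not a component from this repository), assuming that standard Tailwind v4 wiring:

import type { ReactNode } from "react";

// Hypothetical example: these utility classes are generated by Tailwind from the
// --color-* tokens mapped in @theme inline; any token defined in globals.css works the same way.
export function ThemedCallout({ children }: { children: ReactNode }) {
  return (
    <div className="rounded-lg border border-border bg-card text-card-foreground p-4">
      <span className="text-sm text-muted-foreground">{children}</span>
    </div>
  );
}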
@ -1,6 +1,6 @@
import { getServerSession } from "next-auth";
import { redirect } from "next/navigation";
-import { authOptions } from "../pages/api/auth/[...nextauth]";
+import { authOptions } from "./api/auth/[...nextauth]/route";

export default async function HomePage() {
  const session = await getServerSession(authOptions);
|||||||
@ -7,9 +7,9 @@ export function Providers({ children }: { children: ReactNode }) {
  // Including error handling and refetch interval for better user experience
  return (
    <SessionProvider
-      // Re-fetch session every 10 minutes
-      refetchInterval={10 * 60}
-      refetchOnWindowFocus={true}
+      // Re-fetch session every 30 minutes (reduced from 10)
+      refetchInterval={30 * 60}
+      refetchOnWindowFocus={false}
    >
      {children}
    </SessionProvider>
|||||||
@@ -7,7 +7,7 @@ export default function RegisterPage() {
   const [company, setCompany] = useState<string>("");
   const [password, setPassword] = useState<string>("");
   const [csvUrl, setCsvUrl] = useState<string>("");
-  const [role, setRole] = useState<string>("admin"); // Default to admin for company registration
+  const [role, setRole] = useState<string>("ADMIN"); // Default to ADMIN for company registration
   const [error, setError] = useState<string>("");
   const router = useRouter();

@@ -66,7 +66,7 @@ export default function RegisterPage() {
        >
          <option value="admin">Admin</option>
          <option value="user">User</option>
-         <option value="auditor">Auditor</option>
+         <option value="AUDITOR">Auditor</option>
        </select>
        <button className="bg-blue-600 text-white rounded py-2" type="submit">
          Register & Continue
check-refactored-pipeline-status.ts (new file, 78 lines)
@@ -0,0 +1,78 @@
import { PrismaClient } from '@prisma/client';
import { ProcessingStatusManager } from './lib/processingStatusManager';

const prisma = new PrismaClient();

async function checkRefactoredPipelineStatus() {
  try {
    console.log('=== REFACTORED PIPELINE STATUS ===\n');

    // Get pipeline status using the new system
    const pipelineStatus = await ProcessingStatusManager.getPipelineStatus();

    console.log(`Total Sessions: ${pipelineStatus.totalSessions}\n`);

    // Display status for each stage
    const stages = ['CSV_IMPORT', 'TRANSCRIPT_FETCH', 'SESSION_CREATION', 'AI_ANALYSIS', 'QUESTION_EXTRACTION'];

    for (const stage of stages) {
      console.log(`${stage}:`);
      const stageData = pipelineStatus.pipeline[stage] || {};

      const pending = stageData.PENDING || 0;
      const inProgress = stageData.IN_PROGRESS || 0;
      const completed = stageData.COMPLETED || 0;
      const failed = stageData.FAILED || 0;
      const skipped = stageData.SKIPPED || 0;

      console.log(`  PENDING: ${pending}`);
      console.log(`  IN_PROGRESS: ${inProgress}`);
      console.log(`  COMPLETED: ${completed}`);
      console.log(`  FAILED: ${failed}`);
      console.log(`  SKIPPED: ${skipped}`);
      console.log('');
    }

    // Show what needs processing
    console.log('=== WHAT NEEDS PROCESSING ===');

    for (const stage of stages) {
      const stageData = pipelineStatus.pipeline[stage] || {};
      const pending = stageData.PENDING || 0;
      const failed = stageData.FAILED || 0;

      if (pending > 0 || failed > 0) {
        console.log(`• ${stage}: ${pending} pending, ${failed} failed`);
      }
    }

    // Show failed sessions if any
    const failedSessions = await ProcessingStatusManager.getFailedSessions();
    if (failedSessions.length > 0) {
      console.log('\n=== FAILED SESSIONS ===');
      failedSessions.slice(0, 5).forEach(failure => {
        console.log(`  ${failure.session.import?.externalSessionId || failure.sessionId}: ${failure.stage} - ${failure.errorMessage}`);
      });

      if (failedSessions.length > 5) {
        console.log(`  ... and ${failedSessions.length - 5} more failed sessions`);
      }
    }

    // Show sessions ready for AI processing
    const readyForAI = await ProcessingStatusManager.getSessionsNeedingProcessing('AI_ANALYSIS', 5);
    if (readyForAI.length > 0) {
      console.log('\n=== SESSIONS READY FOR AI PROCESSING ===');
      readyForAI.forEach(status => {
        console.log(`  ${status.session.import?.externalSessionId || status.sessionId} (created: ${status.session.createdAt})`);
      });
    }

  } catch (error) {
    console.error('Error checking pipeline status:', error);
  } finally {
    await prisma.$disconnect();
  }
}

checkRefactoredPipelineStatus();
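The script above reads `pipelineStatus.pipeline[stage][STATUS]` as a counter map. A minimal sketch of the shape it appears to assume is below; the real types live in lib/processingStatusManager and may differ, so treat this as an illustration only.

// Hypothetical sketch of the status shape the script consumes.
// Not part of the commit; the actual return type of
// ProcessingStatusManager.getPipelineStatus() is authoritative.
type StageName =
  | "CSV_IMPORT"
  | "TRANSCRIPT_FETCH"
  | "SESSION_CREATION"
  | "AI_ANALYSIS"
  | "QUESTION_EXTRACTION";

type StatusCounts = Partial<
  Record<"PENDING" | "IN_PROGRESS" | "COMPLETED" | "FAILED" | "SKIPPED", number>
>;

interface PipelineStatus {
  totalSessions: number;
  pipeline: Record<StageName, StatusCounts>;
}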
components.json (new file, 21 lines)
@@ -0,0 +1,21 @@
{
  "$schema": "https://ui.shadcn.com/schema.json",
  "style": "new-york",
  "rsc": true,
  "tsx": true,
  "tailwind": {
    "config": "",
    "css": "app/globals.css",
    "baseColor": "neutral",
    "cssVariables": true,
    "prefix": ""
  },
  "aliases": {
    "components": "@/components",
    "utils": "@/lib/utils",
    "ui": "@/components/ui",
    "lib": "@/lib",
    "hooks": "@/hooks"
  },
  "iconLibrary": "lucide"
}
@@ -128,9 +128,9 @@ export function SentimentChart({ sentimentData }: SentimentChartProps) {
           sentimentData.negative,
         ],
         backgroundColor: [
-          "rgba(34, 197, 94, 0.8)", // green
-          "rgba(249, 115, 22, 0.8)", // orange
-          "rgba(239, 68, 68, 0.8)", // red
+          "rgba(37, 99, 235, 0.8)", // blue (primary)
+          "rgba(107, 114, 128, 0.8)", // gray
+          "rgba(236, 72, 153, 0.8)", // pink
         ],
         borderWidth: 1,
       },
@@ -196,12 +196,12 @@ export function LanguagePieChart({ languages }: LanguagePieChartProps) {
       {
         data,
         backgroundColor: [
-          "rgba(59, 130, 246, 0.8)",
-          "rgba(16, 185, 129, 0.8)",
-          "rgba(249, 115, 22, 0.8)",
-          "rgba(236, 72, 153, 0.8)",
-          "rgba(139, 92, 246, 0.8)",
-          "rgba(107, 114, 128, 0.8)",
+          "rgba(37, 99, 235, 0.8)", // blue (primary)
+          "rgba(107, 114, 128, 0.8)", // gray
+          "rgba(236, 72, 153, 0.8)", // pink
+          "rgba(34, 197, 94, 0.8)", // lime green
+          "rgba(168, 85, 247, 0.8)", // purple
+          "rgba(251, 191, 36, 0.8)", // yellow
         ],
         borderWidth: 1,
       },
components/DateRangePicker.tsx (new file, 172 lines)
@@ -0,0 +1,172 @@
"use client";

import { useState, useEffect, useRef, memo } from "react";

interface DateRangePickerProps {
  minDate: string;
  maxDate: string;
  onDateRangeChange: (startDate: string, endDate: string) => void;
  initialStartDate?: string;
  initialEndDate?: string;
}

function DateRangePicker({
  minDate,
  maxDate,
  onDateRangeChange,
  initialStartDate,
  initialEndDate,
}: DateRangePickerProps) {
  const [startDate, setStartDate] = useState(initialStartDate || minDate);
  const [endDate, setEndDate] = useState(initialEndDate || maxDate);
  const isInitializedRef = useRef(false);

  useEffect(() => {
    // Update local state when props change (e.g., when date range is loaded from API)
    if (initialStartDate && initialStartDate !== startDate) {
      setStartDate(initialStartDate);
    }
    if (initialEndDate && initialEndDate !== endDate) {
      setEndDate(initialEndDate);
    }
  }, [initialStartDate, initialEndDate]);

  useEffect(() => {
    // Only notify parent component after initial render and when dates actually change
    // This prevents the infinite loop by not including onDateRangeChange in dependencies
    if (isInitializedRef.current) {
      onDateRangeChange(startDate, endDate);
    } else {
      isInitializedRef.current = true;
    }
  }, [startDate, endDate]);

  const handleStartDateChange = (newStartDate: string) => {
    // Ensure start date is not before min date
    if (newStartDate < minDate) {
      setStartDate(minDate);
      return;
    }

    // Ensure start date is not after end date
    if (newStartDate > endDate) {
      setEndDate(newStartDate);
    }

    setStartDate(newStartDate);
  };

  const handleEndDateChange = (newEndDate: string) => {
    // Ensure end date is not after max date
    if (newEndDate > maxDate) {
      setEndDate(maxDate);
      return;
    }

    // Ensure end date is not before start date
    if (newEndDate < startDate) {
      setStartDate(newEndDate);
    }

    setEndDate(newEndDate);
  };

  const resetToFullRange = () => {
    setStartDate(minDate);
    setEndDate(maxDate);
  };

  const setLast30Days = () => {
    const thirtyDaysAgo = new Date();
    thirtyDaysAgo.setDate(thirtyDaysAgo.getDate() - 30);
    const thirtyDaysAgoStr = thirtyDaysAgo.toISOString().split('T')[0];

    // Use the later of 30 days ago or minDate
    const newStartDate = thirtyDaysAgoStr > minDate ? thirtyDaysAgoStr : minDate;
    setStartDate(newStartDate);
    setEndDate(maxDate);
  };

  const setLast7Days = () => {
    const sevenDaysAgo = new Date();
    sevenDaysAgo.setDate(sevenDaysAgo.getDate() - 7);
    const sevenDaysAgoStr = sevenDaysAgo.toISOString().split('T')[0];

    // Use the later of 7 days ago or minDate
    const newStartDate = sevenDaysAgoStr > minDate ? sevenDaysAgoStr : minDate;
    setStartDate(newStartDate);
    setEndDate(maxDate);
  };

  return (
    <div className="bg-white p-4 rounded-lg shadow-sm border border-gray-200">
      <div className="flex flex-col sm:flex-row gap-4 items-start sm:items-center">
        <div className="flex flex-col sm:flex-row gap-3 items-start sm:items-center">
          <label className="text-sm font-medium text-gray-700 whitespace-nowrap">
            Date Range:
          </label>

          <div className="flex flex-col sm:flex-row gap-2 items-start sm:items-center">
            <div className="flex items-center gap-2">
              <label htmlFor="start-date" className="text-sm text-gray-600">
                From:
              </label>
              <input
                id="start-date"
                type="date"
                value={startDate}
                min={minDate}
                max={maxDate}
                onChange={(e) => handleStartDateChange(e.target.value)}
                className="px-3 py-1.5 border border-gray-300 rounded-md text-sm focus:outline-none focus:ring-2 focus:ring-sky-500 focus:border-sky-500"
              />
            </div>

            <div className="flex items-center gap-2">
              <label htmlFor="end-date" className="text-sm text-gray-600">
                To:
              </label>
              <input
                id="end-date"
                type="date"
                value={endDate}
                min={minDate}
                max={maxDate}
                onChange={(e) => handleEndDateChange(e.target.value)}
                className="px-3 py-1.5 border border-gray-300 rounded-md text-sm focus:outline-none focus:ring-2 focus:ring-sky-500 focus:border-sky-500"
              />
            </div>
          </div>
        </div>

        <div className="flex flex-wrap gap-2">
          <button
            onClick={setLast7Days}
            className="px-3 py-1.5 text-xs font-medium text-sky-600 bg-sky-50 border border-sky-200 rounded-md hover:bg-sky-100 transition-colors"
          >
            Last 7 days
          </button>
          <button
            onClick={setLast30Days}
            className="px-3 py-1.5 text-xs font-medium text-sky-600 bg-sky-50 border border-sky-200 rounded-md hover:bg-sky-100 transition-colors"
          >
            Last 30 days
          </button>
          <button
            onClick={resetToFullRange}
            className="px-3 py-1.5 text-xs font-medium text-gray-600 bg-gray-50 border border-gray-200 rounded-md hover:bg-gray-100 transition-colors"
          >
            All time
          </button>
        </div>
      </div>

      <div className="mt-2 text-xs text-gray-500">
        Available data: {new Date(minDate).toLocaleDateString()} - {new Date(maxDate).toLocaleDateString()}
      </div>
    </div>
  );
}

// Export memoized component as default
export default memo(DateRangePicker);
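A minimal usage sketch for the new DateRangePicker follows. The surrounding parent component, its dates, and its state handling are illustrative assumptions, not part of this commit; only the picker's props come from the file above.

// Hypothetical parent component showing how DateRangePicker is wired up.
"use client";

import { useState } from "react";
import DateRangePicker from "../components/DateRangePicker";

export default function MetricsFilter() {
  // Example bounds; in the dashboard these would come from the API.
  const [range, setRange] = useState({ start: "2025-01-01", end: "2025-01-31" });

  return (
    <DateRangePicker
      minDate="2025-01-01"
      maxDate="2025-01-31"
      initialStartDate={range.start}
      initialEndDate={range.end}
      onDateRangeChange={(start, end) => setRange({ start, end })}
    />
  );
}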
@@ -25,6 +25,30 @@ const getCountryCoordinates = (): Record<string, [number, number]> => {
     US: [37.0902, -95.7129],
     GB: [55.3781, -3.436],
     BA: [43.9159, 17.6791],
+    NL: [52.1326, 5.2913],
+    DE: [51.1657, 10.4515],
+    FR: [46.6034, 1.8883],
+    IT: [41.8719, 12.5674],
+    ES: [40.4637, -3.7492],
+    CA: [56.1304, -106.3468],
+    PL: [51.9194, 19.1451],
+    SE: [60.1282, 18.6435],
+    NO: [60.472, 8.4689],
+    FI: [61.9241, 25.7482],
+    CH: [46.8182, 8.2275],
+    AT: [47.5162, 14.5501],
+    BE: [50.8503, 4.3517],
+    DK: [56.2639, 9.5018],
+    CZ: [49.8175, 15.473],
+    HU: [47.1625, 19.5033],
+    PT: [39.3999, -8.2245],
+    GR: [39.0742, 21.8243],
+    RO: [45.9432, 24.9668],
+    IE: [53.4129, -8.2439],
+    BG: [42.7339, 25.4858],
+    HR: [45.1, 15.2],
+    SK: [48.669, 19.699],
+    SI: [46.1512, 14.9955]
   };
   // This function now primarily returns fallbacks.
   // The actual fetching using @rapideditor/country-coder will be in the component's useEffect.
components/MessageViewer.tsx (new file, 79 lines)
@@ -0,0 +1,79 @@
"use client";

import { Message } from "../lib/types";

interface MessageViewerProps {
  messages: Message[];
}

/**
 * Component to display parsed messages in a chat-like format
 */
export default function MessageViewer({ messages }: MessageViewerProps) {
  if (!messages || messages.length === 0) {
    return (
      <div className="bg-white p-4 rounded-lg shadow">
        <h3 className="font-bold text-lg mb-3">Conversation</h3>
        <p className="text-gray-500 italic">No parsed messages available</p>
      </div>
    );
  }

  return (
    <div className="bg-white p-4 rounded-lg shadow">
      <h3 className="font-bold text-lg mb-3">
        Conversation ({messages.length} messages)
      </h3>

      <div className="space-y-3 max-h-96 overflow-y-auto">
        {messages.map((message) => (
          <div
            key={message.id}
            className={`flex ${
              message.role.toLowerCase() === "user"
                ? "justify-end"
                : "justify-start"
            }`}
          >
            <div
              className={`max-w-xs lg:max-w-md px-4 py-2 rounded-lg ${
                message.role.toLowerCase() === "user"
                  ? "bg-blue-500 text-white"
                  : message.role.toLowerCase() === "assistant"
                    ? "bg-gray-200 text-gray-800"
                    : "bg-yellow-100 text-yellow-800"
              }`}
            >
              <div className="flex items-center justify-between mb-1">
                <span className="text-xs font-medium opacity-75 mr-2">
                  {message.role}
                </span>
                <span className="text-xs opacity-75 ml-2">
                  {message.timestamp ? new Date(message.timestamp).toLocaleTimeString() : 'No timestamp'}
                </span>
              </div>
              <div className="text-sm whitespace-pre-wrap">
                {message.content}
              </div>
            </div>
          </div>
        ))}
      </div>

      <div className="mt-4 pt-3 border-t text-sm text-gray-500">
        <div className="flex justify-between">
          <span>
            First message: {messages[0].timestamp ? new Date(messages[0].timestamp).toLocaleString() : 'No timestamp'}
          </span>
          <span>
            Last message:{" "}
            {(() => {
              const lastMessage = messages[messages.length - 1];
              return lastMessage.timestamp ? new Date(lastMessage.timestamp).toLocaleString() : 'No timestamp';
            })()}
          </span>
        </div>
      </div>
    </div>
  );
}
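A small usage sketch for MessageViewer. The Message fields shown (id, role, content, timestamp) are inferred from how the component reads them; the authoritative shape is the Message type in lib/types, so treat the example data as hypothetical.

// Hypothetical example data; field names inferred from the component above.
import MessageViewer from "../components/MessageViewer";
import { Message } from "../lib/types";

const exampleMessages: Message[] = [
  { id: "1", role: "User", content: "Where is my order?", timestamp: "2025-06-01T10:00:00Z" },
  { id: "2", role: "Assistant", content: "Let me check that for you.", timestamp: "2025-06-01T10:00:05Z" },
];

export default function ConversationPanel() {
  return <MessageViewer messages={exampleMessages} />;
}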
@@ -1,10 +1,15 @@
 "use client";

-import { useRef, useEffect } from "react";
-import Chart from "chart.js/auto";
-import annotationPlugin from "chartjs-plugin-annotation";
-
-Chart.register(annotationPlugin);
+import {
+  BarChart,
+  Bar,
+  XAxis,
+  YAxis,
+  CartesianGrid,
+  Tooltip,
+  ResponsiveContainer,
+  ReferenceLine,
+} from "recharts";

 interface ResponseTimeDistributionProps {
   data: number[];
@@ -12,114 +17,145 @@ interface ResponseTimeDistributionProps {
   targetResponseTime?: number;
 }

+const CustomTooltip = ({ active, payload, label }: any) => {
+  if (active && payload && payload.length) {
+    return (
+      <div className="rounded-lg border border-gray-200 bg-white p-3 shadow-md">
+        <p className="text-sm font-medium text-gray-900">{label}</p>
+        <p className="text-sm text-gray-600">
+          <span className="font-medium text-gray-900">
+            {payload[0].value}
+          </span>{" "}
+          responses
+        </p>
+      </div>
+    );
+  }
+  return null;
+};
+
 export default function ResponseTimeDistribution({
   data,
   average,
   targetResponseTime,
 }: ResponseTimeDistributionProps) {
-  const ref = useRef<HTMLCanvasElement | null>(null);
-
-  useEffect(() => {
-    if (!ref.current || !data || !data.length) return;
-
-    const ctx = ref.current.getContext("2d");
-    if (!ctx) return;
-
-    // Create bins for the histogram (0-1s, 1-2s, 2-3s, etc.)
-    const maxTime = Math.ceil(Math.max(...data));
-    const bins = Array(Math.min(maxTime + 1, 10)).fill(0);
-
-    // Count responses in each bin
-    data.forEach((time) => {
-      const binIndex = Math.min(Math.floor(time), bins.length - 1);
-      bins[binIndex]++;
-    });
-
-    // Create labels for each bin
-    const labels = bins.map((_, i) => {
-      if (i === bins.length - 1 && bins.length < maxTime + 1) {
-        return `${i}+ seconds`;
-      }
-      return `${i}-${i + 1} seconds`;
-    });
-
-    const chart = new Chart(ctx, {
-      type: "bar",
-      data: {
-        labels,
-        datasets: [
-          {
-            label: "Responses",
-            data: bins,
-            backgroundColor: bins.map((_, i) => {
-              // Green for fast, yellow for medium, red for slow
-              if (i <= 2) return "rgba(34, 197, 94, 0.7)"; // Green
-              if (i <= 5) return "rgba(250, 204, 21, 0.7)"; // Yellow
-              return "rgba(239, 68, 68, 0.7)"; // Red
-            }),
-            borderWidth: 1,
-          },
-        ],
-      },
-      options: {
-        responsive: true,
-        plugins: {
-          legend: { display: false },
-          annotation: {
-            annotations: {
-              averageLine: {
-                type: "line",
-                yMin: 0,
-                yMax: Math.max(...bins),
-                xMin: average,
-                xMax: average,
-                borderColor: "rgba(75, 192, 192, 1)",
-                borderWidth: 2,
-                label: {
-                  display: true,
-                  content: "Avg: " + average.toFixed(1) + "s",
-                  position: "start",
-                },
-              },
-              targetLine: targetResponseTime
-                ? {
-                    type: "line",
-                    yMin: 0,
-                    yMax: Math.max(...bins),
-                    xMin: targetResponseTime,
-                    xMax: targetResponseTime,
-                    borderColor: "rgba(75, 192, 192, 0.7)",
-                    borderWidth: 2,
-                    label: {
-                      display: true,
-                      content: "Target",
-                      position: "end",
-                    },
-                  }
-                : undefined,
-            },
-          },
-        },
-        scales: {
-          y: {
-            beginAtZero: true,
-            title: {
-              display: true,
-              text: "Number of Responses",
-            },
-          },
-          x: {
-            title: {
-              display: true,
-              text: "Response Time",
-            },
-          },
-        },
-      },
-    });
-
-    return () => chart.destroy();
-  }, [data, average, targetResponseTime]);
-
-  return <canvas ref={ref} height={180} />;
+  if (!data || !data.length) {
+    return (
+      <div className="flex items-center justify-center h-64 text-muted-foreground">
+        No response time data available
+      </div>
+    );
+  }
+
+  // Create bins for the histogram (0-1s, 1-2s, 2-3s, etc.)
+  const maxTime = Math.ceil(Math.max(...data));
+  const bins = Array(Math.min(maxTime + 1, 10)).fill(0);
+
+  // Count responses in each bin
+  data.forEach((time) => {
+    const binIndex = Math.min(Math.floor(time), bins.length - 1);
+    bins[binIndex]++;
+  });
+
+  // Create chart data
+  const chartData = bins.map((count, i) => {
+    let label;
+    if (i === bins.length - 1 && bins.length < maxTime + 1) {
+      label = `${i}+ sec`;
+    } else {
+      label = `${i}-${i + 1} sec`;
+    }
+
+    // Determine color based on response time using cohesive palette
+    let color;
+    if (i <= 2) color = "rgb(37, 99, 235)"; // Blue for fast (primary color)
+    else if (i <= 5) color = "rgb(107, 114, 128)"; // Gray for medium
+    else color = "rgb(236, 72, 153)"; // Pink for slow
+
+    return {
+      name: label,
+      value: count,
+      color,
+    };
+  });
+
+  return (
+    <div className="h-64">
+      <ResponsiveContainer width="100%" height="100%">
+        <BarChart data={chartData} margin={{ top: 20, right: 30, left: 20, bottom: 5 }}>
+          <CartesianGrid
+            strokeDasharray="3 3"
+            stroke="rgb(229, 231, 235)"
+            strokeOpacity={0.5}
+          />
+          <XAxis
+            dataKey="name"
+            stroke="rgb(100, 116, 139)"
+            fontSize={12}
+            tickLine={false}
+            axisLine={false}
+          />
+          <YAxis
+            stroke="rgb(100, 116, 139)"
+            fontSize={12}
+            tickLine={false}
+            axisLine={false}
+            label={{
+              value: 'Number of Responses',
+              angle: -90,
+              position: 'insideLeft',
+              style: { textAnchor: 'middle', fill: 'rgb(100, 116, 139)' }
+            }}
+          />
+          <Tooltip content={<CustomTooltip />} />
+
+          <Bar
+            dataKey="value"
+            radius={[4, 4, 0, 0]}
+            fill="hsl(var(--chart-1))"
+          >
+            {chartData.map((entry, index) => (
+              <Bar key={`cell-${index}`} fill={entry.color} />
+            ))}
+          </Bar>
+
+          {/* Average line */}
+          <ReferenceLine
+            x={Math.floor(average)}
+            stroke="rgb(0, 123, 255)"
+            strokeWidth={2}
+            strokeDasharray="5 5"
+            label={{
+              value: `Avg: ${average.toFixed(1)}s`,
+              position: "top" as const,
+              style: {
+                fill: "rgb(0, 123, 255)",
+                fontSize: "12px",
+                fontWeight: "500"
+              }
+            }}
+          />
+
+          {/* Target line (if provided) */}
+          {targetResponseTime && (
+            <ReferenceLine
+              x={Math.floor(targetResponseTime)}
+              stroke="rgb(255, 20, 147)"
+              strokeWidth={2}
+              strokeDasharray="3 3"
+              label={{
+                value: `Target: ${targetResponseTime}s`,
+                position: "top" as const,
+                style: {
+                  fill: "rgb(255, 20, 147)",
+                  fontSize: "12px",
+                  fontWeight: "500"
+                }
+              }}
+            />
+          )}
+        </BarChart>
+      </ResponsiveContainer>
+    </div>
+  );
 }
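One detail worth noting in the new Recharts version: the per-bin colors are emitted as nested <Bar> children, whereas Recharts conventionally takes <Cell> children for per-segment fills, which is the pattern components/charts/bar-chart.tsx (later in this compare) uses. A sketch of the Cell-based variant is below; it is not part of the commit and would replace only the <Bar> block inside the <BarChart> above.

// Sketch only (not in the commit): same chartData, but with <Cell> children
// for per-bin colors, mirroring components/charts/bar-chart.tsx.
<Bar dataKey="value" radius={[4, 4, 0, 0]}>
  {chartData.map((entry, index) => (
    <Cell key={`cell-${index}`} fill={entry.color} />
  ))}
</Bar>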
@@ -15,11 +15,10 @@ export default function SessionDetails({ session }: SessionDetailsProps) {
   return (
     <div className="bg-white p-4 rounded-lg shadow">
       <h3 className="font-bold text-lg mb-3">Session Details</h3>
-
-      <div className="space-y-2">
+      <div className="space-y-3">
         <div className="flex justify-between border-b pb-2">
           <span className="text-gray-600">Session ID:</span>
-          <span className="font-medium">{session.sessionId || session.id}</span>
+          <span className="font-medium font-mono text-sm">{session.id}</span>
         </div>

         <div className="flex justify-between border-b pb-2">
@@ -73,20 +72,15 @@ export default function SessionDetails({ session }: SessionDetailsProps) {
           <div className="flex justify-between border-b pb-2">
             <span className="text-gray-600">Sentiment:</span>
             <span
-              className={`font-medium ${
-                session.sentiment > 0.3
+              className={`font-medium capitalize ${
+                session.sentiment === "POSITIVE"
                   ? "text-green-500"
-                  : session.sentiment < -0.3
+                  : session.sentiment === "NEGATIVE"
                     ? "text-red-500"
                     : "text-orange-500"
               }`}
             >
-              {session.sentiment > 0.3
-                ? "Positive"
-                : session.sentiment < -0.3
-                  ? "Negative"
-                  : "Neutral"}{" "}
-              ({session.sentiment.toFixed(2)})
+              {session.sentiment.toLowerCase()}
             </span>
           </div>
         )}
@@ -96,19 +90,6 @@ export default function SessionDetails({ session }: SessionDetailsProps) {
           <span className="font-medium">{session.messagesSent || 0}</span>
         </div>

-        {typeof session.tokens === "number" && (
-          <div className="flex justify-between border-b pb-2">
-            <span className="text-gray-600">Tokens:</span>
-            <span className="font-medium">{session.tokens}</span>
-          </div>
-        )}
-
-        {typeof session.tokensEur === "number" && (
-          <div className="flex justify-between border-b pb-2">
-            <span className="text-gray-600">Cost:</span>
-            <span className="font-medium">€{session.tokensEur.toFixed(4)}</span>
-          </div>
-        )}
-
         {session.avgResponseTime !== null &&
           session.avgResponseTime !== undefined && (
@@ -142,23 +123,48 @@ export default function SessionDetails({ session }: SessionDetailsProps) {
             </div>
           )}

-        {/* Transcript rendering is now handled by the parent page (app/dashboard/sessions/[id]/page.tsx) */}
-        {/* Fallback to link only if we only have the URL but no content - this might also be redundant if parent handles all transcript display */}
-        {(!session.transcriptContent ||
-          session.transcriptContent.length === 0) &&
-          session.fullTranscriptUrl && (
-            <div className="flex justify-between pt-2">
-              <span className="text-gray-600">Transcript:</span>
-              <a
-                href={session.fullTranscriptUrl}
-                target="_blank"
-                rel="noopener noreferrer"
-                className="text-blue-500 hover:text-blue-700 underline"
-              >
-                View Full Transcript
-              </a>
-            </div>
-          )}
+        {session.ipAddress && (
+          <div className="flex justify-between border-b pb-2">
+            <span className="text-gray-600">IP Address:</span>
+            <span className="font-medium font-mono text-sm">
+              {session.ipAddress}
+            </span>
+          </div>
+        )}
+
+        {session.initialMsg && (
+          <div className="border-b pb-2">
+            <span className="text-gray-600 block mb-1">Initial Message:</span>
+            <div className="bg-gray-50 p-2 rounded text-sm italic">
+              "{session.initialMsg}"
+            </div>
+          </div>
+        )}
+
+        {session.summary && (
+          <div className="border-b pb-2">
+            <span className="text-gray-600 block mb-1">AI Summary:</span>
+            <div className="bg-blue-50 p-2 rounded text-sm">
+              {session.summary}
+            </div>
+          </div>
+        )}
+
+
+        {session.fullTranscriptUrl && (
+          <div className="flex justify-between pt-2">
+            <span className="text-gray-600">Transcript:</span>
+            <a
+              href={session.fullTranscriptUrl}
+              target="_blank"
+              rel="noopener noreferrer"
+              className="text-blue-500 hover:text-blue-700 underline"
+            >
+              View Full Transcript
+            </a>
+          </div>
+        )}
       </div>
     </div>
   );
@ -167,7 +167,7 @@ const NavItem: React.FC<NavItemProps> = ({
|
|||||||
}
|
}
|
||||||
}}
|
}}
|
||||||
>
|
>
|
||||||
<span className={`flex-shrink-0 ${isExpanded ? "mr-3" : "mx-auto"}`}>
|
<span className={`shrink-0 ${isExpanded ? "mr-3" : "mx-auto"}`}>
|
||||||
{icon}
|
{icon}
|
||||||
</span>
|
</span>
|
||||||
{isExpanded ? (
|
{isExpanded ? (
|
||||||
@ -334,7 +334,7 @@ export default function Sidebar({
|
|||||||
isExpanded ? "" : "justify-center"
|
isExpanded ? "" : "justify-center"
|
||||||
}`}
|
}`}
|
||||||
>
|
>
|
||||||
<span className={`flex-shrink-0 ${isExpanded ? "mr-3" : ""}`}>
|
<span className={`shrink-0 ${isExpanded ? "mr-3" : ""}`}>
|
||||||
<LogoutIcon />
|
<LogoutIcon />
|
||||||
</span>
|
</span>
|
||||||
{isExpanded ? (
|
{isExpanded ? (
|
||||||
|
|||||||
components/TopQuestionsChart.tsx (new file, 76 lines)
@@ -0,0 +1,76 @@
'use client';

import React from 'react';
import { TopQuestion } from '../lib/types';

interface TopQuestionsChartProps {
  data: TopQuestion[];
  title?: string;
}

export default function TopQuestionsChart({ data, title = "Top 5 Asked Questions" }: TopQuestionsChartProps) {
  if (!data || data.length === 0) {
    return (
      <div className="bg-white p-6 rounded-2xl shadow-sm border border-gray-100">
        <h3 className="text-lg font-semibold text-gray-900 mb-6">{title}</h3>
        <div className="text-center py-12 text-gray-500">
          No questions data available
        </div>
      </div>
    );
  }

  // Find the maximum count to calculate relative bar widths
  const maxCount = Math.max(...data.map(q => q.count));

  return (
    <div className="bg-white p-6 rounded-2xl shadow-sm border border-gray-100">
      <h3 className="text-lg font-semibold text-gray-900 mb-6">{title}</h3>

      <div className="space-y-6">
        {data.map((question, index) => {
          const percentage = maxCount > 0 ? (question.count / maxCount) * 100 : 0;

          return (
            <div key={index} className="group">
              {/* Rank and Question */}
              <div className="flex items-start gap-4 mb-3">
                <div className="flex-shrink-0 w-8 h-8 bg-gray-100 text-gray-900 text-sm font-semibold rounded-full flex items-center justify-center">
                  {index + 1}
                </div>
                <div className="flex-1 min-w-0">
                  <p className="text-sm font-medium text-gray-900 leading-relaxed mb-2">
                    {question.question}
                  </p>
                  <div className="flex items-center justify-between">
                    <div className="flex-1 mr-4">
                      <div className="w-full bg-gray-100 rounded-full h-2">
                        <div
                          className="bg-blue-600 h-2 rounded-full transition-all duration-500 ease-out"
                          style={{ width: `${percentage}%` }}
                        />
                      </div>
                    </div>
                    <span className="text-sm font-semibold text-gray-900 min-w-0">
                      {question.count} times
                    </span>
                  </div>
                </div>
              </div>
            </div>
          );
        })}
      </div>

      {/* Summary */}
      <div className="mt-8 pt-6 border-t border-gray-100">
        <div className="flex justify-between items-center">
          <span className="text-sm text-gray-600">Total questions analyzed</span>
          <span className="text-sm font-semibold text-gray-900">
            {data.reduce((sum, q) => sum + q.count, 0)}
          </span>
        </div>
      </div>
    </div>
  );
}
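A quick usage sketch for TopQuestionsChart. The TopQuestion shape ({ question, count }) is inferred from how the component reads its props; the canonical definition is in lib/types, and the example values are invented for illustration.

// Hypothetical example data; shape inferred from the component above.
import TopQuestionsChart from "../components/TopQuestionsChart";
import { TopQuestion } from "../lib/types";

const topQuestions: TopQuestion[] = [
  { question: "How do I reset my password?", count: 42 },
  { question: "What are your opening hours?", count: 31 },
  { question: "Can I change my delivery address?", count: 17 },
];

export default function QuestionsPanel() {
  return <TopQuestionsChart data={topQuestions} title="Top 5 Asked Questions" />;
}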
@@ -22,7 +22,7 @@ export default function WelcomeBanner({ companyName }: WelcomeBannerProps) {
   }

   return (
-    <div className="bg-gradient-to-r from-blue-600 to-indigo-700 text-white p-6 rounded-xl shadow-lg mb-8">
+    <div className="bg-linear-to-r from-blue-600 to-indigo-700 text-white p-6 rounded-xl shadow-lg mb-8">
       <div className="flex justify-between items-center">
         <div>
           <h1 className="text-3xl font-bold">
components/charts/bar-chart.tsx (new file, 105 lines)
@@ -0,0 +1,105 @@
"use client";

import {
  BarChart,
  Bar,
  XAxis,
  YAxis,
  CartesianGrid,
  Tooltip,
  ResponsiveContainer,
  Cell,
} from "recharts";
import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card";

interface BarChartProps {
  data: Array<{ name: string; value: number; [key: string]: any }>;
  title?: string;
  dataKey?: string;
  colors?: string[];
  height?: number;
  className?: string;
}

const CustomTooltip = ({ active, payload, label }: any) => {
  if (active && payload && payload.length) {
    return (
      <div className="rounded-lg border bg-background p-3 shadow-md">
        <p className="text-sm font-medium">{label}</p>
        <p className="text-sm text-muted-foreground">
          <span className="font-medium text-foreground">
            {payload[0].value}
          </span>{" "}
          sessions
        </p>
      </div>
    );
  }
  return null;
};

export default function ModernBarChart({
  data,
  title,
  dataKey = "value",
  colors = [
    "rgb(37, 99, 235)", // Blue (primary)
    "rgb(107, 114, 128)", // Gray
    "rgb(236, 72, 153)", // Pink
    "rgb(34, 197, 94)", // Lime green
    "rgb(168, 85, 247)", // Purple
  ],
  height = 300,
  className,
}: BarChartProps) {
  return (
    <Card className={className}>
      {title && (
        <CardHeader>
          <CardTitle className="text-lg font-semibold">{title}</CardTitle>
        </CardHeader>
      )}
      <CardContent>
        <ResponsiveContainer width="100%" height={height}>
          <BarChart data={data} margin={{ top: 5, right: 30, left: 20, bottom: 5 }}>
            <CartesianGrid
              strokeDasharray="3 3"
              stroke="rgb(229, 231, 235)"
              strokeOpacity={0.5}
            />
            <XAxis
              dataKey="name"
              stroke="rgb(100, 116, 139)"
              fontSize={12}
              tickLine={false}
              axisLine={false}
              angle={-45}
              textAnchor="end"
              height={80}
            />
            <YAxis
              stroke="rgb(100, 116, 139)"
              fontSize={12}
              tickLine={false}
              axisLine={false}
            />
            <Tooltip content={<CustomTooltip />} />
            <Bar
              dataKey={dataKey}
              radius={[4, 4, 0, 0]}
              className="transition-all duration-200"
            >
              {data.map((entry, index) => (
                <Cell
                  key={`cell-${index}`}
                  fill={colors[index % colors.length]}
                  className="hover:opacity-80"
                />
              ))}
            </Bar>
          </BarChart>
        </ResponsiveContainer>
      </CardContent>
    </Card>
  );
}
components/charts/donut-chart.tsx (new file, 122 lines)
@@ -0,0 +1,122 @@
"use client";

import { PieChart, Pie, Cell, ResponsiveContainer, Tooltip, Legend } from "recharts";
import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card";

interface DonutChartProps {
  data: Array<{ name: string; value: number; color?: string }>;
  title?: string;
  centerText?: {
    title: string;
    value: string | number;
  };
  colors?: string[];
  height?: number;
  className?: string;
}

const CustomTooltip = ({ active, payload }: any) => {
  if (active && payload && payload.length) {
    const data = payload[0];
    return (
      <div className="rounded-lg border bg-background p-3 shadow-md">
        <p className="text-sm font-medium">{data.name}</p>
        <p className="text-sm text-muted-foreground">
          <span className="font-medium text-foreground">
            {data.value}
          </span>{" "}
          sessions ({((data.value / data.payload.total) * 100).toFixed(1)}%)
        </p>
      </div>
    );
  }
  return null;
};

const CustomLegend = ({ payload }: any) => {
  return (
    <div className="flex flex-wrap justify-center gap-4 mt-4">
      {payload.map((entry: any, index: number) => (
        <div key={index} className="flex items-center gap-2">
          <div
            className="w-3 h-3 rounded-full"
            style={{ backgroundColor: entry.color }}
          />
          <span className="text-sm text-muted-foreground">{entry.value}</span>
        </div>
      ))}
    </div>
  );
};

const CenterLabel = ({ centerText, total }: any) => {
  if (!centerText) return null;

  return (
    <div className="absolute inset-0 flex items-center justify-center pointer-events-none">
      <div className="text-center">
        <p className="text-2xl font-bold">{centerText.value}</p>
        <p className="text-sm text-muted-foreground">{centerText.title}</p>
      </div>
    </div>
  );
};

export default function ModernDonutChart({
  data,
  title,
  centerText,
  colors = [
    "rgb(37, 99, 235)", // Blue (primary)
    "rgb(107, 114, 128)", // Gray
    "rgb(236, 72, 153)", // Pink
    "rgb(34, 197, 94)", // Lime green
    "rgb(168, 85, 247)", // Purple
  ],
  height = 300,
  className,
}: DonutChartProps) {
  const total = data.reduce((sum, item) => sum + item.value, 0);
  const dataWithTotal = data.map(item => ({ ...item, total }));

  return (
    <Card className={className}>
      {title && (
        <CardHeader>
          <CardTitle className="text-lg font-semibold">{title}</CardTitle>
        </CardHeader>
      )}
      <CardContent>
        <div className="relative">
          <ResponsiveContainer width="100%" height={height}>
            <PieChart>
              <Pie
                data={dataWithTotal}
                cx="50%"
                cy="50%"
                innerRadius={60}
                outerRadius={100}
                paddingAngle={2}
                dataKey="value"
                className="transition-all duration-200"
              >
                {dataWithTotal.map((entry, index) => (
                  <Cell
                    key={`cell-${index}`}
                    fill={entry.color || colors[index % colors.length]}
                    className="hover:opacity-80 cursor-pointer"
                    stroke="white"
                    strokeWidth={2}
                  />
                ))}
              </Pie>
              <Tooltip content={<CustomTooltip />} />
              <Legend content={<CustomLegend />} />
            </PieChart>
          </ResponsiveContainer>
          <CenterLabel centerText={centerText} total={total} />
        </div>
      </CardContent>
    </Card>
  );
}
components/charts/line-chart.tsx (new file, 117 lines)
@@ -0,0 +1,117 @@
"use client";

import {
  LineChart,
  Line,
  XAxis,
  YAxis,
  CartesianGrid,
  Tooltip,
  ResponsiveContainer,
  Area,
  AreaChart,
} from "recharts";
import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card";

interface LineChartProps {
  data: Array<{ date: string; value: number; [key: string]: any }>;
  title?: string;
  dataKey?: string;
  color?: string;
  gradient?: boolean;
  height?: number;
  className?: string;
}

const CustomTooltip = ({ active, payload, label }: any) => {
  if (active && payload && payload.length) {
    return (
      <div className="rounded-lg border bg-background p-3 shadow-md">
        <p className="text-sm font-medium">{label}</p>
        <p className="text-sm text-muted-foreground">
          <span className="font-medium text-foreground">
            {payload[0].value}
          </span>{" "}
          sessions
        </p>
      </div>
    );
  }
  return null;
};

export default function ModernLineChart({
  data,
  title,
  dataKey = "value",
  color = "rgb(37, 99, 235)",
  gradient = true,
  height = 300,
  className,
}: LineChartProps) {
  const ChartComponent = gradient ? AreaChart : LineChart;

  return (
    <Card className={className}>
      {title && (
        <CardHeader>
          <CardTitle className="text-lg font-semibold">{title}</CardTitle>
        </CardHeader>
      )}
      <CardContent>
        <ResponsiveContainer width="100%" height={height}>
          <ChartComponent data={data} margin={{ top: 5, right: 30, left: 20, bottom: 5 }}>
            <defs>
              {gradient && (
                <linearGradient id="colorGradient" x1="0" y1="0" x2="0" y2="1">
                  <stop offset="5%" stopColor={color} stopOpacity={0.3} />
                  <stop offset="95%" stopColor={color} stopOpacity={0.05} />
                </linearGradient>
              )}
            </defs>
            <CartesianGrid
              strokeDasharray="3 3"
              stroke="rgb(229, 231, 235)"
              strokeOpacity={0.5}
            />
            <XAxis
              dataKey="date"
              stroke="rgb(100, 116, 139)"
              fontSize={12}
              tickLine={false}
              axisLine={false}
            />
            <YAxis
              stroke="rgb(100, 116, 139)"
              fontSize={12}
              tickLine={false}
              axisLine={false}
            />
            <Tooltip content={<CustomTooltip />} />

            {gradient ? (
              <Area
                type="monotone"
                dataKey={dataKey}
                stroke={color}
                strokeWidth={2}
                fill="url(#colorGradient)"
                dot={{ fill: color, strokeWidth: 2, r: 4 }}
                activeDot={{ r: 6, stroke: color, strokeWidth: 2 }}
              />
            ) : (
              <Line
                type="monotone"
                dataKey={dataKey}
                stroke={color}
                strokeWidth={2}
                dot={{ fill: color, strokeWidth: 2, r: 4 }}
                activeDot={{ r: 6, stroke: color, strokeWidth: 2 }}
              />
            )}
          </ChartComponent>
        </ResponsiveContainer>
      </CardContent>
    </Card>
  );
}
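The three chart wrappers share the Card shell and the same cohesive palette. A sketch of how a dashboard page might compose them follows; the data values and the page component are illustrative assumptions, not part of this compare.

// Hypothetical dashboard snippet composing the three new chart wrappers.
import ModernBarChart from "@/components/charts/bar-chart";
import ModernDonutChart from "@/components/charts/donut-chart";
import ModernLineChart from "@/components/charts/line-chart";

const byCategory = [
  { name: "Billing", value: 24 },
  { name: "Shipping", value: 18 },
  { name: "Returns", value: 9 },
];

const byDay = [
  { date: "2025-06-01", value: 12 },
  { date: "2025-06-02", value: 19 },
  { date: "2025-06-03", value: 7 },
];

export default function ChartsOverview() {
  return (
    <div className="grid gap-6 md:grid-cols-2">
      <ModernBarChart title="Sessions by category" data={byCategory} />
      <ModernDonutChart
        title="Share of sessions"
        data={byCategory}
        centerText={{ title: "Total", value: 51 }}
      />
      <ModernLineChart title="Sessions per day" data={byDay} className="md:col-span-2" />
    </div>
  );
}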
components/ui/badge.tsx (new file, 46 lines)
@@ -0,0 +1,46 @@
import * as React from "react"
import { Slot } from "@radix-ui/react-slot"
import { cva, type VariantProps } from "class-variance-authority"

import { cn } from "@/lib/utils"

const badgeVariants = cva(
  "inline-flex items-center justify-center rounded-md border px-2 py-0.5 text-xs font-medium w-fit whitespace-nowrap shrink-0 [&>svg]:size-3 gap-1 [&>svg]:pointer-events-none focus-visible:border-ring focus-visible:ring-ring/50 focus-visible:ring-[3px] aria-invalid:ring-destructive/20 dark:aria-invalid:ring-destructive/40 aria-invalid:border-destructive transition-[color,box-shadow] overflow-hidden",
  {
    variants: {
      variant: {
        default:
          "border-transparent bg-primary text-primary-foreground [a&]:hover:bg-primary/90",
        secondary:
          "border-transparent bg-secondary text-secondary-foreground [a&]:hover:bg-secondary/90",
        destructive:
          "border-transparent bg-destructive text-white [a&]:hover:bg-destructive/90 focus-visible:ring-destructive/20 dark:focus-visible:ring-destructive/40 dark:bg-destructive/60",
        outline:
          "text-foreground [a&]:hover:bg-accent [a&]:hover:text-accent-foreground",
      },
    },
    defaultVariants: {
      variant: "default",
    },
  }
)

function Badge({
  className,
  variant,
  asChild = false,
  ...props
}: React.ComponentProps<"span"> &
  VariantProps<typeof badgeVariants> & { asChild?: boolean }) {
  const Comp = asChild ? Slot : "span"

  return (
    <Comp
      data-slot="badge"
      className={cn(badgeVariants({ variant }), className)}
      {...props}
    />
  )
}

export { Badge, badgeVariants }
components/ui/button.tsx (new file, 59 lines)
@@ -0,0 +1,59 @@
import * as React from "react"
import { Slot } from "@radix-ui/react-slot"
import { cva, type VariantProps } from "class-variance-authority"

import { cn } from "@/lib/utils"

const buttonVariants = cva(
  "inline-flex items-center justify-center gap-2 whitespace-nowrap rounded-md text-sm font-medium transition-all disabled:pointer-events-none disabled:opacity-50 [&_svg]:pointer-events-none [&_svg:not([class*='size-'])]:size-4 shrink-0 [&_svg]:shrink-0 outline-none focus-visible:border-ring focus-visible:ring-ring/50 focus-visible:ring-[3px] aria-invalid:ring-destructive/20 dark:aria-invalid:ring-destructive/40 aria-invalid:border-destructive",
  {
    variants: {
      variant: {
        default:
          "bg-primary text-primary-foreground shadow-xs hover:bg-primary/90",
        destructive:
          "bg-destructive text-white shadow-xs hover:bg-destructive/90 focus-visible:ring-destructive/20 dark:focus-visible:ring-destructive/40 dark:bg-destructive/60",
        outline:
          "border bg-background shadow-xs hover:bg-accent hover:text-accent-foreground dark:bg-input/30 dark:border-input dark:hover:bg-input/50",
        secondary:
          "bg-secondary text-secondary-foreground shadow-xs hover:bg-secondary/80",
        ghost:
          "hover:bg-accent hover:text-accent-foreground dark:hover:bg-accent/50",
        link: "text-primary underline-offset-4 hover:underline",
      },
      size: {
        default: "h-9 px-4 py-2 has-[>svg]:px-3",
        sm: "h-8 rounded-md gap-1.5 px-3 has-[>svg]:px-2.5",
        lg: "h-10 rounded-md px-6 has-[>svg]:px-4",
        icon: "size-9",
      },
    },
    defaultVariants: {
      variant: "default",
      size: "default",
    },
  }
)

function Button({
  className,
  variant,
  size,
  asChild = false,
  ...props
}: React.ComponentProps<"button"> &
  VariantProps<typeof buttonVariants> & {
    asChild?: boolean
  }) {
  const Comp = asChild ? Slot : "button"

  return (
    <Comp
      data-slot="button"
      className={cn(buttonVariants({ variant, size, className }))}
      {...props}
    />
  )
}

export { Button, buttonVariants }
components/ui/card.tsx (new file, 92 lines)
@@ -0,0 +1,92 @@
import * as React from "react"

import { cn } from "@/lib/utils"

function Card({ className, ...props }: React.ComponentProps<"div">) {
  return (
    <div
      data-slot="card"
      className={cn(
        "bg-white text-gray-900 flex flex-col gap-6 rounded-2xl border border-gray-100 py-6 shadow-sm",
        className
      )}
      {...props}
    />
  )
}

function CardHeader({ className, ...props }: React.ComponentProps<"div">) {
  return (
    <div
      data-slot="card-header"
      className={cn(
        "@container/card-header grid auto-rows-min grid-rows-[auto_auto] items-start gap-1.5 px-6 has-data-[slot=card-action]:grid-cols-[1fr_auto] [.border-b]:pb-6",
        className
      )}
      {...props}
    />
  )
}

function CardTitle({ className, ...props }: React.ComponentProps<"div">) {
  return (
    <div
      data-slot="card-title"
      className={cn("leading-none font-semibold", className)}
      {...props}
    />
  )
}

function CardDescription({ className, ...props }: React.ComponentProps<"div">) {
  return (
    <div
      data-slot="card-description"
      className={cn("text-muted-foreground text-sm", className)}
      {...props}
    />
  )
}

function CardAction({ className, ...props }: React.ComponentProps<"div">) {
  return (
    <div
      data-slot="card-action"
      className={cn(
        "col-start-2 row-span-2 row-start-1 self-start justify-self-end",
        className
      )}
      {...props}
    />
  )
}

function CardContent({ className, ...props }: React.ComponentProps<"div">) {
  return (
    <div
      data-slot="card-content"
      className={cn("px-6", className)}
      {...props}
    />
  )
}

function CardFooter({ className, ...props }: React.ComponentProps<"div">) {
  return (
    <div
      data-slot="card-footer"
      className={cn("flex items-center px-6 [.border-t]:pt-6", className)}
      {...props}
    />
  )
}

export {
  Card,
  CardHeader,
  CardFooter,
  CardTitle,
  CardAction,
  CardDescription,
  CardContent,
}
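These ui primitives follow the shadcn/ui pattern: variants declared with cva and class names merged through cn. A minimal composition sketch is below; the component and its labels are invented for illustration and are not part of the compare.

// Hypothetical composition of the new ui primitives.
import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card";
import { Button } from "@/components/ui/button";
import { Badge } from "@/components/ui/badge";

export default function ExampleCard() {
  return (
    <Card>
      <CardHeader>
        <CardTitle>Session processing</CardTitle>
      </CardHeader>
      <CardContent className="flex items-center gap-3">
        <Badge variant="secondary">AI_ANALYSIS</Badge>
        <Button size="sm" variant="outline">
          Re-run stage
        </Button>
      </CardContent>
    </Card>
  );
}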
components/ui/dropdown-menu.tsx (new file, 257 lines)
@@ -0,0 +1,257 @@
"use client"

import * as React from "react"
import * as DropdownMenuPrimitive from "@radix-ui/react-dropdown-menu"
import { CheckIcon, ChevronRightIcon, CircleIcon } from "lucide-react"

import { cn } from "@/lib/utils"

function DropdownMenu({
  ...props
}: React.ComponentProps<typeof DropdownMenuPrimitive.Root>) {
  return <DropdownMenuPrimitive.Root data-slot="dropdown-menu" {...props} />
}

function DropdownMenuPortal({
  ...props
}: React.ComponentProps<typeof DropdownMenuPrimitive.Portal>) {
  return (
    <DropdownMenuPrimitive.Portal data-slot="dropdown-menu-portal" {...props} />
  )
}

function DropdownMenuTrigger({
  ...props
}: React.ComponentProps<typeof DropdownMenuPrimitive.Trigger>) {
  return (
    <DropdownMenuPrimitive.Trigger
      data-slot="dropdown-menu-trigger"
      {...props}
    />
  )
}

function DropdownMenuContent({
  className,
  sideOffset = 4,
  ...props
}: React.ComponentProps<typeof DropdownMenuPrimitive.Content>) {
  return (
    <DropdownMenuPrimitive.Portal>
      <DropdownMenuPrimitive.Content
        data-slot="dropdown-menu-content"
        sideOffset={sideOffset}
        className={cn(
          "bg-popover text-popover-foreground data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 data-[state=closed]:zoom-out-95 data-[state=open]:zoom-in-95 data-[side=bottom]:slide-in-from-top-2 data-[side=left]:slide-in-from-right-2 data-[side=right]:slide-in-from-left-2 data-[side=top]:slide-in-from-bottom-2 z-50 max-h-(--radix-dropdown-menu-content-available-height) min-w-32 origin-(--radix-dropdown-menu-content-transform-origin) overflow-x-hidden overflow-y-auto rounded-md border p-1 shadow-md",
          className
        )}
        {...props}
      />
    </DropdownMenuPrimitive.Portal>
  )
}

function DropdownMenuGroup({
  ...props
}: React.ComponentProps<typeof DropdownMenuPrimitive.Group>) {
  return (
    <DropdownMenuPrimitive.Group data-slot="dropdown-menu-group" {...props} />
  )
}

function DropdownMenuItem({
  className,
  inset,
  variant = "default",
  ...props
}: React.ComponentProps<typeof DropdownMenuPrimitive.Item> & {
  inset?: boolean
  variant?: "default" | "destructive"
}) {
  return (
    <DropdownMenuPrimitive.Item
      data-slot="dropdown-menu-item"
      data-inset={inset}
      data-variant={variant}
      className={cn(
        "focus:bg-accent focus:text-accent-foreground data-[variant=destructive]:text-destructive data-[variant=destructive]:focus:bg-destructive/10 dark:data-[variant=destructive]:focus:bg-destructive/20 data-[variant=destructive]:focus:text-destructive data-[variant=destructive]:*:[svg]:text-destructive! [&_svg:not([class*='text-'])]:text-muted-foreground relative flex cursor-default items-center gap-2 rounded-sm px-2 py-1.5 text-sm outline-hidden select-none data-disabled:pointer-events-none data-disabled:opacity-50 data-inset:pl-8 [&_svg]:pointer-events-none [&_svg]:shrink-0 [&_svg:not([class*='size-'])]:size-4",
        className
      )}
      {...props}
    />
  )
}

function DropdownMenuCheckboxItem({
  className,
  children,
  checked,
  ...props
}: React.ComponentProps<typeof DropdownMenuPrimitive.CheckboxItem>) {
  return (
    <DropdownMenuPrimitive.CheckboxItem
      data-slot="dropdown-menu-checkbox-item"
      className={cn(
        "focus:bg-accent focus:text-accent-foreground relative flex cursor-default items-center gap-2 rounded-sm py-1.5 pr-2 pl-8 text-sm outline-hidden select-none data-disabled:pointer-events-none data-disabled:opacity-50 [&_svg]:pointer-events-none [&_svg]:shrink-0 [&_svg:not([class*='size-'])]:size-4",
        className
      )}
      checked={checked}
      {...props}
    >
      <span className="pointer-events-none absolute left-2 flex size-3.5 items-center justify-center">
        <DropdownMenuPrimitive.ItemIndicator>
          <CheckIcon className="size-4" />
        </DropdownMenuPrimitive.ItemIndicator>
      </span>
      {children}
    </DropdownMenuPrimitive.CheckboxItem>
  )
}

function DropdownMenuRadioGroup({
  ...props
}: React.ComponentProps<typeof DropdownMenuPrimitive.RadioGroup>) {
  return (
    <DropdownMenuPrimitive.RadioGroup
      data-slot="dropdown-menu-radio-group"
      {...props}
    />
  )
}

function DropdownMenuRadioItem({
  className,
  children,
  ...props
}: React.ComponentProps<typeof DropdownMenuPrimitive.RadioItem>) {
  return (
    <DropdownMenuPrimitive.RadioItem
      data-slot="dropdown-menu-radio-item"
      className={cn(
        "focus:bg-accent focus:text-accent-foreground relative flex cursor-default items-center gap-2 rounded-sm py-1.5 pr-2 pl-8 text-sm outline-hidden select-none data-disabled:pointer-events-none data-disabled:opacity-50 [&_svg]:pointer-events-none [&_svg]:shrink-0 [&_svg:not([class*='size-'])]:size-4",
        className
      )}
      {...props}
    >
      <span className="pointer-events-none absolute left-2 flex size-3.5 items-center justify-center">
        <DropdownMenuPrimitive.ItemIndicator>
          <CircleIcon className="size-2 fill-current" />
        </DropdownMenuPrimitive.ItemIndicator>
      </span>
      {children}
    </DropdownMenuPrimitive.RadioItem>
  )
}

function DropdownMenuLabel({
  className,
  inset,
  ...props
}: React.ComponentProps<typeof DropdownMenuPrimitive.Label> & {
  inset?: boolean
}) {
  return (
    <DropdownMenuPrimitive.Label
      data-slot="dropdown-menu-label"
      data-inset={inset}
      className={cn(
        "px-2 py-1.5 text-sm font-medium data-inset:pl-8",
        className
      )}
      {...props}
    />
  )
}

function DropdownMenuSeparator({
  className,
  ...props
}: React.ComponentProps<typeof DropdownMenuPrimitive.Separator>) {
  return (
    <DropdownMenuPrimitive.Separator
      data-slot="dropdown-menu-separator"
      className={cn("bg-border -mx-1 my-1 h-px", className)}
      {...props}
    />
  )
}

function DropdownMenuShortcut({
  className,
  ...props
}: React.ComponentProps<"span">) {
  return (
    <span
      data-slot="dropdown-menu-shortcut"
      className={cn(
        "text-muted-foreground ml-auto text-xs tracking-widest",
        className
      )}
      {...props}
    />
  )
}

function DropdownMenuSub({
  ...props
}: React.ComponentProps<typeof DropdownMenuPrimitive.Sub>) {
  return <DropdownMenuPrimitive.Sub data-slot="dropdown-menu-sub" {...props} />
}

function DropdownMenuSubTrigger({
  className,
  inset,
  children,
  ...props
}: React.ComponentProps<typeof DropdownMenuPrimitive.SubTrigger> & {
  inset?: boolean
}) {
  return (
    <DropdownMenuPrimitive.SubTrigger
      data-slot="dropdown-menu-sub-trigger"
      data-inset={inset}
      className={cn(
        "focus:bg-accent focus:text-accent-foreground data-[state=open]:bg-accent data-[state=open]:text-accent-foreground flex cursor-default items-center rounded-sm px-2 py-1.5 text-sm outline-hidden select-none data-inset:pl-8",
        className
      )}
      {...props}
    >
      {children}
      <ChevronRightIcon className="ml-auto size-4" />
    </DropdownMenuPrimitive.SubTrigger>
  )
}

function DropdownMenuSubContent({
  className,
  ...props
}: React.ComponentProps<typeof DropdownMenuPrimitive.SubContent>) {
  return (
    <DropdownMenuPrimitive.SubContent
      data-slot="dropdown-menu-sub-content"
      className={cn(
        "bg-popover text-popover-foreground data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 data-[state=closed]:zoom-out-95 data-[state=open]:zoom-in-95 data-[side=bottom]:slide-in-from-top-2 data-[side=left]:slide-in-from-right-2 data-[side=right]:slide-in-from-left-2 data-[side=top]:slide-in-from-bottom-2 z-50 min-w-32 origin-(--radix-dropdown-menu-content-transform-origin) overflow-hidden rounded-md border p-1 shadow-lg",
        className
      )}
      {...props}
    />
  )
}

export {
  DropdownMenu,
  DropdownMenuPortal,
  DropdownMenuTrigger,
  DropdownMenuContent,
  DropdownMenuGroup,
  DropdownMenuLabel,
  DropdownMenuItem,
  DropdownMenuCheckboxItem,
  DropdownMenuRadioGroup,
  DropdownMenuRadioItem,
  DropdownMenuSeparator,
  DropdownMenuShortcut,
  DropdownMenuSub,
  DropdownMenuSubTrigger,
  DropdownMenuSubContent,
}
components/ui/metric-card.tsx (new file, 150 lines)
@@ -0,0 +1,150 @@
"use client";

import { Card, CardContent, CardHeader } from "@/components/ui/card";
import { Badge } from "@/components/ui/badge";
import { Skeleton } from "@/components/ui/skeleton";
import { cn } from "@/lib/utils";
import { TrendingUp, TrendingDown, Minus } from "lucide-react";

interface MetricCardProps {
  title: string;
  value: string | number | null | undefined;
  description?: string;
  icon?: React.ReactNode;
  trend?: {
    value: number;
    label?: string;
    isPositive?: boolean;
  };
  variant?: "default" | "primary" | "success" | "warning" | "danger";
  isLoading?: boolean;
  className?: string;
}

export default function MetricCard({
  title,
  value,
  description,
  icon,
  trend,
  variant = "default",
  isLoading = false,
  className,
}: MetricCardProps) {
  if (isLoading) {
    return (
      <Card className={cn("relative overflow-hidden", className)}>
        <CardHeader className="pb-3">
          <div className="flex items-center justify-between">
            <Skeleton className="h-4 w-24" />
            <Skeleton className="h-10 w-10 rounded-full" />
          </div>
        </CardHeader>
        <CardContent>
          <Skeleton className="h-8 w-16 mb-2" />
          <Skeleton className="h-3 w-20" />
        </CardContent>
      </Card>
    );
  }

  const getVariantClasses = () => {
    switch (variant) {
      case "primary":
        return "border border-blue-100 bg-white shadow-sm hover:shadow-md";
      case "success":
        return "border border-green-100 bg-white shadow-sm hover:shadow-md";
      case "warning":
        return "border border-pink-100 bg-white shadow-sm hover:shadow-md";
      case "danger":
        return "border border-red-100 bg-white shadow-sm hover:shadow-md";
      default:
        return "border border-gray-100 bg-white shadow-sm hover:shadow-md";
    }
  };

  const getIconClasses = () => {
    return "bg-gray-50 text-gray-900 border-gray-100";
  };

  const getTrendIcon = () => {
    if (!trend) return null;

    if (trend.value === 0) {
      return <Minus className="h-3 w-3" />;
    }

    return trend.isPositive !== false ? (
      <TrendingUp className="h-3 w-3" />
    ) : (
      <TrendingDown className="h-3 w-3" />
    );
  };

  const getTrendColor = () => {
    if (!trend || trend.value === 0) return "text-muted-foreground";
    return trend.isPositive !== false
      ? "text-green-600 dark:text-green-400"
      : "text-red-600 dark:text-red-400";
  };

  return (
    <Card
      className={cn(
        "relative overflow-hidden transition-all duration-200 hover:shadow-lg hover:-translate-y-0.5",
        getVariantClasses(),
        className
      )}
    >
      <CardHeader className="pb-3 relative">
        <div className="flex items-start justify-between">
          <div className="space-y-1">
            <p className="text-sm font-medium text-gray-900 leading-none">
              {title}
            </p>
            {description && (
              <p className="text-xs text-muted-foreground/80">
                {description}
              </p>
            )}
          </div>

          {icon && (
            <div
              className={cn(
                "flex h-10 w-10 shrink-0 items-center justify-center rounded-full border transition-colors",
                getIconClasses()
              )}
            >
              <span className="text-lg">{icon}</span>
            </div>
          )}
        </div>
      </CardHeader>

      <CardContent className="relative">
        <div className="flex items-end justify-between">
          <div className="space-y-1">
            <p className="text-2xl font-bold tracking-tight text-gray-900">
              {value ?? "—"}
            </p>

            {trend && (
              <Badge
                variant="secondary"
                className={cn(
                  "text-xs font-medium px-2 py-0.5 gap-1",
                  getTrendColor(),
                  "bg-background/50 border-current/20"
                )}
              >
                {getTrendIcon()}
                {Math.abs(trend.value).toFixed(1)}%
                {trend.label && ` ${trend.label}`}
              </Badge>
            )}
          </div>
        </div>
      </CardContent>
    </Card>
  );
}
components/ui/separator.tsx (new file, 28 lines)
@@ -0,0 +1,28 @@
"use client"

import * as React from "react"
import * as SeparatorPrimitive from "@radix-ui/react-separator"

import { cn } from "@/lib/utils"

function Separator({
  className,
  orientation = "horizontal",
  decorative = true,
  ...props
}: React.ComponentProps<typeof SeparatorPrimitive.Root>) {
  return (
    <SeparatorPrimitive.Root
      data-slot="separator"
      decorative={decorative}
      orientation={orientation}
      className={cn(
        "bg-border shrink-0 data-[orientation=horizontal]:h-px data-[orientation=horizontal]:w-full data-[orientation=vertical]:h-full data-[orientation=vertical]:w-px",
        className
      )}
      {...props}
    />
  )
}

export { Separator }
components/ui/skeleton.tsx (new file, 13 lines)
@@ -0,0 +1,13 @@
import { cn } from "@/lib/utils"

function Skeleton({ className, ...props }: React.ComponentProps<"div">) {
  return (
    <div
      data-slot="skeleton"
      className={cn("bg-accent animate-pulse rounded-md", className)}
      {...props}
    />
  )
}

export { Skeleton }
components/ui/tooltip.tsx (new file, 61 lines)
@@ -0,0 +1,61 @@
"use client"

import * as React from "react"
import * as TooltipPrimitive from "@radix-ui/react-tooltip"

import { cn } from "@/lib/utils"

function TooltipProvider({
  delayDuration = 0,
  ...props
}: React.ComponentProps<typeof TooltipPrimitive.Provider>) {
  return (
    <TooltipPrimitive.Provider
      data-slot="tooltip-provider"
      delayDuration={delayDuration}
      {...props}
    />
  )
}

function Tooltip({
  ...props
}: React.ComponentProps<typeof TooltipPrimitive.Root>) {
  return (
    <TooltipProvider>
      <TooltipPrimitive.Root data-slot="tooltip" {...props} />
    </TooltipProvider>
  )
}

function TooltipTrigger({
  ...props
}: React.ComponentProps<typeof TooltipPrimitive.Trigger>) {
  return <TooltipPrimitive.Trigger data-slot="tooltip-trigger" {...props} />
}

function TooltipContent({
  className,
  sideOffset = 0,
  children,
  ...props
}: React.ComponentProps<typeof TooltipPrimitive.Content>) {
  return (
    <TooltipPrimitive.Portal>
      <TooltipPrimitive.Content
        data-slot="tooltip-content"
        sideOffset={sideOffset}
        className={cn(
          "bg-primary text-primary-foreground animate-in fade-in-0 zoom-in-95 data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=closed]:zoom-out-95 data-[side=bottom]:slide-in-from-top-2 data-[side=left]:slide-in-from-right-2 data-[side=right]:slide-in-from-left-2 data-[side=top]:slide-in-from-bottom-2 z-50 w-fit origin-(--radix-tooltip-content-transform-origin) rounded-md px-3 py-1.5 text-xs text-balance",
          className
        )}
        {...props}
      >
        {children}
        <TooltipPrimitive.Arrow className="bg-primary fill-primary z-50 size-2.5 translate-y-[calc(-50%-2px)] rotate-45 rounded-[2px]" />
      </TooltipPrimitive.Content>
    </TooltipPrimitive.Portal>
  )
}

export { Tooltip, TooltipTrigger, TooltipContent, TooltipProvider }
debug-import-status.ts (new file, 81 lines)
@@ -0,0 +1,81 @@
import { PrismaClient } from '@prisma/client';
import { ProcessingStatusManager } from './lib/processingStatusManager';

const prisma = new PrismaClient();

async function debugImportStatus() {
  try {
    console.log('=== DEBUGGING PROCESSING STATUS (REFACTORED SYSTEM) ===\n');

    // Get pipeline status using the new system
    const pipelineStatus = await ProcessingStatusManager.getPipelineStatus();

    console.log(`Total Sessions: ${pipelineStatus.totalSessions}\n`);

    // Display status for each stage
    const stages = ['CSV_IMPORT', 'TRANSCRIPT_FETCH', 'SESSION_CREATION', 'AI_ANALYSIS', 'QUESTION_EXTRACTION'];

    for (const stage of stages) {
      console.log(`${stage}:`);
      const stageData = pipelineStatus.pipeline[stage] || {};

      const pending = stageData.PENDING || 0;
      const inProgress = stageData.IN_PROGRESS || 0;
      const completed = stageData.COMPLETED || 0;
      const failed = stageData.FAILED || 0;
      const skipped = stageData.SKIPPED || 0;

      console.log(`  PENDING: ${pending}`);
      console.log(`  IN_PROGRESS: ${inProgress}`);
      console.log(`  COMPLETED: ${completed}`);
      console.log(`  FAILED: ${failed}`);
      console.log(`  SKIPPED: ${skipped}`);
      console.log('');
    }

    // Check Sessions vs SessionImports
    console.log('=== SESSION IMPORT RELATIONSHIP ===');
    const sessionsWithImports = await prisma.session.count({
      where: { importId: { not: null } }
    });
    const totalSessions = await prisma.session.count();

    console.log(`  Sessions with importId: ${sessionsWithImports}`);
    console.log(`  Total sessions: ${totalSessions}`);

    // Show failed sessions if any
    const failedSessions = await ProcessingStatusManager.getFailedSessions();
    if (failedSessions.length > 0) {
      console.log('\n=== FAILED SESSIONS ===');
      failedSessions.slice(0, 10).forEach(failure => {
        console.log(`  ${failure.session.import?.externalSessionId || failure.sessionId}: ${failure.stage} - ${failure.errorMessage}`);
      });

      if (failedSessions.length > 10) {
        console.log(`  ... and ${failedSessions.length - 10} more failed sessions`);
      }
    } else {
      console.log('\n✓ No failed sessions found');
    }

    // Show what needs processing
    console.log('\n=== WHAT NEEDS PROCESSING ===');

    for (const stage of stages) {
      const stageData = pipelineStatus.pipeline[stage] || {};
      const pending = stageData.PENDING || 0;
      const failed = stageData.FAILED || 0;

      if (pending > 0 || failed > 0) {
        console.log(`• ${stage}: ${pending} pending, ${failed} failed`);
      }
    }

  } catch (error) {
    console.error('Error debugging processing status:', error);
  } finally {
    await prisma.$disconnect();
  }
}

debugImportStatus();
@@ -12,10 +12,10 @@ The WordCloud component visualizes categories or topics based on their frequency

**Features:**

- Dynamic sizing based on frequency
- Colorful display with a pleasing color palette
- Responsive design
- Interactive hover effects

### 2. GeographicMap

@@ -25,10 +25,10 @@ This component displays a world map with circles representing the number of sessions

**Features:**

- Interactive map using React Leaflet
- Circle sizes scaled by session count
- Tooltips showing country names and session counts
- Responsive design

### 3. MetricCard

@@ -38,10 +38,10 @@ A modern, visually appealing card for displaying key metrics.

**Features:**

- Multiple design variants (default, primary, success, warning, danger)
- Support for trend indicators
- Icons and descriptions
- Clean, modern styling

### 4. DonutChart

@@ -51,10 +51,10 @@ An enhanced donut chart with better styling and a central text display capability.

**Features:**

- Customizable colors
- Center text area for displaying summaries
- Interactive tooltips with percentages
- Well-balanced legend display

### 5. ResponseTimeDistribution

@@ -64,28 +64,28 @@ Visualizes the distribution of response times as a histogram.

**Features:**

- Color-coded bars (green for fast, yellow for medium, red for slow)
- Target time indicator
- Automatic binning of response times
- Clear labeling and scales

## Dashboard Enhancements

The dashboard has been enhanced with:

1. **Improved Layout**: Better use of space and responsive grid layouts
2. **Visual Hierarchies**: Clear heading styles and consistent spacing
3. **Color Coding**: Semantic use of colors to indicate statuses
4. **Interactive Elements**: Better button styles with loading indicators
5. **Data Context**: More complete view of metrics with additional visualizations
6. **Geographic Insights**: Map view of session distribution by country
7. **Language Analysis**: Improved language distribution visualization
8. **Category Analysis**: Word cloud for category popularity
9. **Performance Metrics**: Response time distribution for better insight into system performance

## Usage Notes

- The geographic map and response time distribution use simulated data where actual data is not available
- All components are responsive and will adjust to different screen sizes
- The dashboard automatically refreshes data when using the refresh button
- Admin users have access to additional controls at the bottom of the dashboard
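
The MetricCard described above takes the props defined in `components/ui/metric-card.tsx` earlier in this diff. A minimal usage sketch follows; the wrapper component, icon choice, and numbers are illustrative and not taken from the repository:

```tsx
import MetricCard from "@/components/ui/metric-card";
import { MessageSquare } from "lucide-react";

// Illustrative values; in the dashboard these would come from the metrics API.
export function SessionsMetric({ total, weekOverWeek }: { total: number; weekOverWeek: number }) {
  return (
    <MetricCard
      title="Total Sessions"
      value={total}
      description="All imported chat sessions"
      icon={<MessageSquare className="h-5 w-5" />}
      variant="primary"
      trend={{ value: weekOverWeek, label: "vs last week", isPositive: weekOverWeek >= 0 }}
    />
  );
}
```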
docs/postgresql-migration.md (new file, 130 lines)
@@ -0,0 +1,130 @@
# PostgreSQL Migration Documentation

## Overview

Successfully migrated the livedash-node application from SQLite to PostgreSQL using Neon as the database provider. This migration provides better scalability, performance, and production-readiness.

## Migration Summary

### What Was Changed

1. **Database Provider**: Changed from SQLite to PostgreSQL in `prisma/schema.prisma`
2. **Environment Configuration**: Updated to use environment-based database URL selection
3. **Test Setup**: Configured separate test database using `DATABASE_URL_TEST`
4. **Migration History**: Reset and created fresh PostgreSQL migrations

### Database Configuration

#### Production/Development

- **Provider**: PostgreSQL (Neon)
- **Environment Variable**: `DATABASE_URL`
- **Connection**: Neon PostgreSQL cluster

#### Testing

- **Provider**: PostgreSQL (Neon - separate database)
- **Environment Variable**: `DATABASE_URL_TEST`
- **Test Setup**: Automatically switches to test database during test runs

### Files Modified

1. **`prisma/schema.prisma`**

   - Changed provider from `sqlite` to `postgresql`
   - Updated URL to use `env("DATABASE_URL")`

2. **`tests/setup.ts`**

   - Added logic to use `DATABASE_URL_TEST` when available
   - Ensures test isolation with separate database

3. **`.env`** (created)

   - Contains `DATABASE_URL` for Prisma CLI operations

4. **`.env.local`** (existing)

   - Contains both `DATABASE_URL` and `DATABASE_URL_TEST`

### Database Schema

All existing models and relationships were preserved:

- **Company**: Multi-tenant root entity
- **User**: Authentication and authorization
- **Session**: Processed session data
- **SessionImport**: Raw CSV import data
- **Message**: Individual conversation messages
- **Question**: Normalized question storage
- **SessionQuestion**: Session-question relationships
- **AIProcessingRequest**: AI cost tracking

### Migration Process

1. **Schema Update**: Changed provider to PostgreSQL
2. **Migration Reset**: Removed SQLite migration history
3. **Fresh Migration**: Created new PostgreSQL migration
4. **Client Generation**: Generated new Prisma client for PostgreSQL
5. **Database Seeding**: Applied initial seed data
6. **Testing**: Verified all functionality works with PostgreSQL

### Benefits Achieved

✅ **Production-Ready**: PostgreSQL is enterprise-grade and scalable
✅ **Better Performance**: Superior query performance and optimization
✅ **Advanced Features**: Full JSON support, arrays, advanced indexing
✅ **Test Isolation**: Separate test database prevents data conflicts
✅ **Consistency**: Same database engine across all environments
✅ **Cloud-Native**: Neon provides managed PostgreSQL with excellent DX

### Environment Variables

```env
# Production/Development Database
DATABASE_URL="postgresql://user:pass@host/database?sslmode=require"

# Test Database (separate Neon database)
DATABASE_URL_TEST="postgresql://user:pass@test-host/test-database?sslmode=require"
```

### Test Configuration

Tests automatically use the test database when `DATABASE_URL_TEST` is set:

```typescript
// In tests/setup.ts
if (process.env.DATABASE_URL_TEST) {
  process.env.DATABASE_URL = process.env.DATABASE_URL_TEST;
}
```

### Verification

All tests pass successfully:

- ✅ Environment configuration tests
- ✅ Transcript fetcher tests
- ✅ Database connection tests
- ✅ Schema validation tests
- ✅ CRUD operation tests

### Next Steps

1. **Data Import**: Import production data if needed
2. **Performance Monitoring**: Monitor query performance in production
3. **Backup Strategy**: Configure automated backups via Neon
4. **Connection Pooling**: Consider connection pooling for high-traffic scenarios

### Rollback Plan

If rollback is needed:

1. Revert `prisma/schema.prisma` to SQLite configuration
2. Restore SQLite migration files from git history
3. Update environment variables
4. Run `prisma migrate reset` and `prisma generate`

## Conclusion

The PostgreSQL migration was successful and provides a solid foundation for production deployment. The application now benefits from PostgreSQL's advanced features while maintaining full test isolation and development workflow compatibility.
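
On the connection-pooling point in "Next Steps": a common pattern when using Prisma with Next.js is to reuse a single client so dev hot reloads do not exhaust the Postgres connection pool. This is a generic sketch, not code from this repository; the file path `lib/prisma.ts` is an assumption:

```typescript
// lib/prisma.ts (sketch) - cache one PrismaClient on globalThis in development
import { PrismaClient } from "@prisma/client";

const globalForPrisma = globalThis as unknown as { prisma?: PrismaClient };

export const prisma = globalForPrisma.prisma ?? new PrismaClient();

if (process.env.NODE_ENV !== "production") {
  globalForPrisma.prisma = prisma;
}
```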
docs/processing-system-refactor.md (new file, 133 lines)
@@ -0,0 +1,133 @@
# Processing System Refactor - Complete

## Overview

Successfully refactored the session processing pipeline from a simple status-based system to a comprehensive multi-stage processing status system. This addresses the original issues with the SessionImport table's `status` and `errorMsg` columns.

## Problems Solved

### Original Issues

1. **Inconsistent Status Tracking**: The old system used a simple enum on SessionImport that didn't properly track the multi-stage processing pipeline
2. **Poor Error Visibility**: Error messages were buried in the SessionImport table and not easily accessible
3. **No Stage-Specific Tracking**: The system couldn't track which specific stage of processing failed
4. **Difficult Recovery**: Failed sessions were hard to identify and retry
5. **Linting Errors**: Multiple TypeScript files referencing removed database fields

### Schema Changes Made

- **Removed** old `status`, `errorMsg`, and `processedAt` columns from SessionImport
- **Removed** `processed` field from Session
- **Added** new `SessionProcessingStatus` table with granular stage tracking
- **Added** `ProcessingStage` and `ProcessingStatus` enums

## New Processing Pipeline

### Processing Stages

```typescript
enum ProcessingStage {
  CSV_IMPORT, // SessionImport created
  TRANSCRIPT_FETCH, // Transcript content fetched
  SESSION_CREATION, // Session + Messages created
  AI_ANALYSIS, // AI processing completed
  QUESTION_EXTRACTION, // Questions extracted
}

enum ProcessingStatus {
  PENDING, IN_PROGRESS, COMPLETED, FAILED, SKIPPED
}
```

### Key Components

#### 1. ProcessingStatusManager

Centralized class for managing processing status with methods (a usage sketch follows at the end of this document):

- `initializeSession()` - Set up processing status for new sessions
- `startStage()`, `completeStage()`, `failStage()`, `skipStage()` - Stage management
- `getSessionsNeedingProcessing()` - Query sessions by stage and status
- `getPipelineStatus()` - Get overview of entire pipeline
- `getFailedSessions()` - Find sessions needing retry
- `resetStageForRetry()` - Reset failed stages

#### 2. Updated Processing Scheduler

- Integrated with new `ProcessingStatusManager`
- Tracks AI analysis and question extraction stages
- Records detailed processing metadata
- Proper error handling and retry capabilities

#### 3. Migration System

- Successfully migrated all 109 existing sessions
- Determined current state based on existing data
- Preserved all existing functionality

## Current Pipeline Status

After migration and refactoring:

- **CSV_IMPORT**: 109 completed
- **TRANSCRIPT_FETCH**: 109 completed
- **SESSION_CREATION**: 109 completed
- **AI_ANALYSIS**: 16 completed, 93 pending
- **QUESTION_EXTRACTION**: 11 completed, 98 pending

## Files Updated/Created

### New Files

- `lib/processingStatusManager.ts` - Core processing status management
- `check-refactored-pipeline-status.ts` - New pipeline status checker
- `migrate-to-refactored-system.ts` - Migration script
- `docs/processing-system-refactor.md` - This documentation

### Updated Files

- `prisma/schema.prisma` - Added new processing status tables
- `lib/processingScheduler.ts` - Integrated with new status system
- `debug-import-status.ts` - Updated to use new system
- `fix-import-status.ts` - Updated to use new system

### Removed Files

- `check-pipeline-status.ts` - Replaced by refactored version

## Benefits Achieved

1. **Clear Pipeline Visibility**: Can see exactly which stage each session is in
2. **Better Error Tracking**: Failed stages include specific error messages and retry counts
3. **Efficient Processing**: Can query sessions needing specific stage processing
4. **Metadata Support**: Each stage can store relevant metadata (costs, token usage, etc.)
5. **Easy Recovery**: Failed sessions can be easily identified and retried
6. **Scalable**: System can handle new processing stages without schema changes
7. **No Linting Errors**: All TypeScript compilation issues resolved

## Usage Examples

### Check Pipeline Status

```bash
npx tsx check-refactored-pipeline-status.ts
```

### Debug Processing Issues

```bash
npx tsx debug-import-status.ts
```

### Fix/Retry Failed Sessions

```bash
npx tsx fix-import-status.ts
```

### Process Sessions

```bash
npx tsx test-ai-processing.ts
```

## Next Steps

1. **Test AI Processing**: Run AI processing on pending sessions
2. **Monitor Performance**: Watch for any issues with the new system
3. **Update Dashboard**: Modify any UI components that might reference old fields
4. **Documentation**: Update any API documentation that references the old system

## Migration Notes

- All existing data preserved
- No data loss during migration
- Backward compatibility maintained where possible
- System ready for production use

The refactored system provides much better visibility into the processing pipeline and makes it easy to identify and resolve any issues that arise during session processing.
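
To make the stage lifecycle above concrete, here is a hedged sketch of how a worker might drive `ProcessingStatusManager`. The method names come from the list above; the argument shapes and the metadata payload are assumptions, not the actual signatures in `lib/processingStatusManager.ts`:

```typescript
// Sketch only - method names from the refactor doc; argument shapes are assumed.
import { ProcessingStatusManager } from "../lib/processingStatusManager";

async function runAiAnalysis(
  sessionId: string,
  analyse: (id: string) => Promise<{ cost: number }>
) {
  await ProcessingStatusManager.startStage(sessionId, "AI_ANALYSIS");
  try {
    const result = await analyse(sessionId);
    // Each stage can store metadata such as cost or token usage.
    await ProcessingStatusManager.completeStage(sessionId, "AI_ANALYSIS", { cost: result.cost });
  } catch (err) {
    await ProcessingStatusManager.failStage(
      sessionId,
      "AI_ANALYSIS",
      err instanceof Error ? err.message : String(err)
    );
  }
}
```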
docs/scheduler-fixes.md (new file, 79 lines)
@@ -0,0 +1,79 @@
# Scheduler Error Fixes

## Issues Identified and Resolved

### 1. Invalid Company Configuration

**Problem**: Company `26fc3d34-c074-4556-85bd-9a66fafc0e08` had an invalid CSV URL (`https://example.com/data.csv`) with no authentication credentials.

**Solution**:

- Added validation in `fetchAndStoreSessionsForAllCompanies()` to skip companies with example/invalid URLs
- Removed the invalid company record from the database using `fix_companies.js`

### 2. Transcript Fetching Errors

**Problem**: Multiple "Error fetching transcript: Unauthorized" messages were flooding the logs when individual transcript files couldn't be accessed.

**Solution**:

- Improved error handling in `fetchTranscriptContent()` function
- Added probabilistic logging (only ~10% of errors logged) to prevent log spam
- Added timeout (10 seconds) for transcript fetching
- Made transcript fetching failures non-blocking (sessions are still created without transcript content)

### 3. CSV Fetching Errors

**Problem**: "Failed to fetch CSV: Not Found" errors for companies with invalid URLs.

**Solution**:

- Added URL validation to skip companies with `example.com` URLs
- Improved error logging to be more descriptive

## Current Status

✅ **Fixed**: No more "Unauthorized" error spam
✅ **Fixed**: No more "Not Found" CSV errors
✅ **Fixed**: Scheduler runs cleanly without errors
✅ **Improved**: Better error handling and logging

## Remaining Companies

After cleanup, only valid companies remain:

- **Demo Company** (`790b9233-d369-451f-b92c-f4dceb42b649`)
  - CSV URL: `https://proto.notso.ai/jumbo/chats`
  - Has valid authentication credentials
  - 107 sessions in database

## Files Modified

1. **lib/csvFetcher.js**

   - Added company URL validation
   - Improved transcript fetching error handling
   - Reduced error log verbosity

2. **fix_companies.js** (cleanup script)

   - Removes invalid company records
   - Can be run again if needed

## Monitoring

The scheduler now runs cleanly every 15 minutes. To monitor:

```bash
# Check scheduler logs
node debug_db.js

# Test manual refresh
node -e "import('./lib/csvFetcher.js').then(m => m.fetchAndStoreSessionsForAllCompanies())"
```

## Future Improvements

1. Add health check endpoint for scheduler status
2. Add metrics for successful/failed fetches
3. Consider retry logic for temporary failures
4. Add alerting for persistent failures
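
As a rough illustration of the URL validation described under issue 1, a hedged sketch of the skip logic; the field name `csvUrl` and the exact rule are assumptions, not copied from `lib/csvFetcher.js`:

```typescript
// Sketch of the "skip example/invalid URLs" guard; company.csvUrl is an assumed field name.
function hasUsableCsvUrl(company: { csvUrl?: string | null }): boolean {
  if (!company.csvUrl) return false;
  try {
    const url = new URL(company.csvUrl);
    // Placeholder URLs such as https://example.com/data.csv can never succeed.
    return !url.hostname.endsWith("example.com");
  } catch {
    return false; // malformed URL
  }
}
```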
docs/scheduler-workflow.md (new file, 211 lines)
@@ -0,0 +1,211 @@
# Scheduler Workflow Documentation

## Overview

The LiveDash system has two main schedulers that work together to fetch and process session data:

1. **Session Refresh Scheduler** - Fetches new sessions from CSV files
2. **Processing Scheduler** - Processes session transcripts with AI

## Current Status (as of latest check)

- **Total sessions**: 107
- **Processed sessions**: 0
- **Sessions with transcript**: 0
- **Ready for processing**: 0

## How the `processed` Field Works

The ProcessingScheduler picks up sessions where `processed` is **NOT** `true`, which includes:

- `processed = false`
- `processed = null`

**Query used:**

```javascript
{ processed: { not: true } } // Either false or null
```

## Complete Workflow

### Step 1: Session Refresh (CSV Fetching)

**What it does:**

- Fetches session data from company CSV URLs
- Creates session records in database with basic metadata
- Sets `transcriptContent = null` initially
- Sets `processed = null` initially

**Runs:** Every 30 minutes (cron: `*/30 * * * *`)

### Step 2: Transcript Fetching

**What it does:**

- Downloads full transcript content for sessions
- Updates `transcriptContent` field with actual conversation data
- Sessions remain `processed = null` until AI processing

**Runs:** As part of session refresh process

### Step 3: AI Processing

**What it does:**

- Finds sessions with transcript content where `processed != true`
- Sends transcripts to OpenAI for analysis
- Extracts: sentiment, category, questions, summary, etc.
- Updates session with processed data
- Sets `processed = true`

**Runs:** Every hour (cron: `0 * * * *`)

## Manual Trigger Commands

### Check Current Status

```bash
node scripts/manual-triggers.js status
```

### Trigger Session Refresh (Fetch new sessions from CSV)

```bash
node scripts/manual-triggers.js refresh
```

### Trigger AI Processing (Process unprocessed sessions)

```bash
node scripts/manual-triggers.js process
```

### Run Both Schedulers

```bash
node scripts/manual-triggers.js both
```

## Troubleshooting

### No Sessions Being Processed?

1. **Check if sessions have transcripts:**

   ```bash
   node scripts/manual-triggers.js status
   ```

2. **If "Sessions with transcript" is 0:**

   - Sessions exist but transcripts haven't been fetched yet
   - Run session refresh: `node scripts/manual-triggers.js refresh`

3. **If "Ready for processing" is 0 but "Sessions with transcript" > 0:**

   - All sessions with transcripts have already been processed
   - Check if `OPENAI_API_KEY` is set in environment

### Common Issues

#### "No sessions found requiring processing"

- All sessions with transcripts have been processed (`processed = true`)
- Or no sessions have transcript content yet

#### "OPENAI_API_KEY environment variable is not set"

- Add OpenAI API key to `.env.development` file
- Restart the application

#### "Error fetching transcript: Unauthorized"

- CSV credentials are incorrect or expired
- Check company CSV username/password in database

## Database Field Mapping

### Before AI Processing

```javascript
{
  id: "session-uuid",
  transcriptContent: "full conversation text" | null,
  processed: null,
  sentimentCategory: null,
  questions: null,
  summary: null,
  // ... other fields
}
```

### After AI Processing

```javascript
{
  id: "session-uuid",
  transcriptContent: "full conversation text",
  processed: true,
  sentimentCategory: "positive" | "neutral" | "negative",
  questions: '["question 1", "question 2"]', // JSON string
  summary: "Brief conversation summary",
  language: "en", // ISO 639-1 code
  messagesSent: 5,
  sentiment: 0.8, // Float value (-1 to 1)
  escalated: false,
  forwardedHr: false,
  category: "Schedule & Hours",
  // ... other fields
}
```

## Scheduler Configuration

### Session Refresh Scheduler

- **File**: `lib/scheduler.js`
- **Frequency**: Every 30 minutes
- **Cron**: `*/30 * * * *`

### Processing Scheduler

- **File**: `lib/processingScheduler.js`
- **Frequency**: Every hour
- **Cron**: `0 * * * *`
- **Batch size**: 10 sessions per run

## Environment Variables Required

```bash
# Database
DATABASE_URL="postgresql://..."

# OpenAI (for processing)
OPENAI_API_KEY="sk-..."

# NextAuth
NEXTAUTH_SECRET="..."
NEXTAUTH_URL="http://localhost:3000"
```

## Next Steps for Testing

1. **Trigger session refresh** to fetch transcripts:

   ```bash
   node scripts/manual-triggers.js refresh
   ```

2. **Check status** to see if transcripts were fetched:

   ```bash
   node scripts/manual-triggers.js status
   ```

3. **Trigger processing** if transcripts are available:

   ```bash
   node scripts/manual-triggers.js process
   ```

4. **View results** in the dashboard session details pages
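
Putting the `{ processed: { not: true } }` query in context, a hedged sketch of how the ProcessingScheduler might select its batch. The `processed` and `transcriptContent` fields and the batch size of 10 come from this document; the surrounding code is illustrative, not copied from `lib/processingScheduler.js`:

```typescript
// Sketch: select sessions that have a transcript but have not been AI-processed yet.
import { PrismaClient } from "@prisma/client";

const prisma = new PrismaClient();

async function getSessionsToProcess(batchSize = 10) {
  return prisma.session.findMany({
    where: {
      processed: { not: true },          // either false or null
      transcriptContent: { not: null },  // transcript must already be fetched
    },
    take: batchSize,
  });
}
```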
docs/session-processing.md (new file, 86 lines)
@@ -0,0 +1,86 @@
# Session Processing with OpenAI

This document explains how the session processing system works in LiveDash-Node.

## Overview

The system now includes an automated process for analyzing chat session transcripts using OpenAI's API. This process:

1. Fetches session data from CSV sources
2. Only adds new sessions that don't already exist in the database
3. Processes session transcripts with OpenAI to extract valuable insights
4. Updates the database with the processed information

## How It Works

### Session Fetching

- The system fetches session data from configured CSV URLs for each company
- Unlike the previous implementation, it now only adds sessions that don't already exist in the database
- This prevents duplicate sessions and allows for incremental updates

### Transcript Processing

- For sessions with transcript content that haven't been processed yet, the system calls OpenAI's API
- The API analyzes the transcript and extracts the following information:
  - Primary language used (ISO 639-1 code)
  - Number of messages sent by the user
  - Overall sentiment (positive, neutral, negative)
  - Whether the conversation was escalated
  - Whether HR contact was mentioned or provided
  - Best-fitting category for the conversation
  - Up to 5 paraphrased questions asked by the user
  - A brief summary of the conversation

### Scheduling

The system includes two schedulers:

1. **Session Refresh Scheduler**: Runs every 15 minutes to fetch new sessions from CSV sources
2. **Session Processing Scheduler**: Runs every hour to process unprocessed sessions with OpenAI

## Database Schema

The Session model has been updated with new fields to store the processed data:

- `processed`: Boolean flag indicating whether the session has been processed
- `sentimentCategory`: String value ("positive", "neutral", "negative") from OpenAI
- `questions`: JSON array of questions asked by the user
- `summary`: Brief summary of the conversation

## Configuration

### OpenAI API Key

To use the session processing feature, you need to add your OpenAI API key to the `.env.local` file:

```ini
OPENAI_API_KEY=your_api_key_here
```

### Running with Schedulers

To run the application with schedulers enabled:

- Development: `npm run dev`
- Development (with schedulers disabled): `npm run dev:no-schedulers`
- Production: `npm run start`

Note: These commands will start a custom Next.js server with the schedulers enabled. You'll need to have an OpenAI API key set in your `.env.local` file for the session processing to work.

## Manual Processing

You can also manually process sessions by running the script:

```bash
node scripts/process_sessions.mjs
```

This will process all unprocessed sessions that have transcript content.

## Customization

The processing logic can be customized by modifying:

- `lib/processingScheduler.ts`: Contains the OpenAI processing logic
- `scripts/process_sessions.ts`: Standalone script for manual processing
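
For a sense of what the transcript analysis call looks like, here is a hedged sketch. The prompt wording, model name, and response shape are illustrative assumptions; the actual implementation lives in `lib/processingScheduler.ts` and may differ:

```typescript
// Sketch of a per-transcript analysis call asking for the fields listed above.
import OpenAI from "openai";

const openai = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });

async function analyseTranscript(transcript: string) {
  const completion = await openai.chat.completions.create({
    model: "gpt-4o-mini", // assumed model; the project may configure a different one
    response_format: { type: "json_object" },
    messages: [
      {
        role: "system",
        content:
          "Return JSON with: language (ISO 639-1), messagesSent, sentiment " +
          "(positive|neutral|negative), escalated, forwardedHr, category, " +
          "questions (max 5, paraphrased), summary.",
      },
      { role: "user", content: transcript },
    ],
  });
  return JSON.parse(completion.choices[0].message.content ?? "{}");
}
```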
228
docs/transcript-parsing-implementation.md
Normal file
228
docs/transcript-parsing-implementation.md
Normal file
@ -0,0 +1,228 @@
|
|||||||
|
# Transcript Parsing Implementation
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
|
||||||
|
Added structured message parsing to the LiveDash system, allowing transcripts to be broken down into individual messages with timestamps, roles, and content. This provides a much better user experience for viewing conversations.
|
||||||
|
|
||||||
|
## Database Changes
|
||||||
|
|
||||||
|
### New Message Table
|
||||||
|
|
||||||
|
```sql
|
||||||
|
CREATE TABLE Message (
|
||||||
|
id TEXT PRIMARY KEY DEFAULT (uuid()),
|
||||||
|
sessionId TEXT NOT NULL,
|
||||||
|
timestamp DATETIME NOT NULL,
|
||||||
|
role TEXT NOT NULL,
|
||||||
|
content TEXT NOT NULL,
|
||||||
|
order INTEGER NOT NULL,
|
||||||
|
createdAt DATETIME DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
FOREIGN KEY (sessionId) REFERENCES Session(id) ON DELETE CASCADE
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE INDEX Message_sessionId_order_idx ON Message(sessionId, order);
|
||||||
|
```
|
||||||
|
|
||||||
|
### Updated Session Table
|
||||||
|
|
||||||
|
- Added `messages` relation to Session model
|
||||||
|
- Sessions can now have both raw transcript content AND parsed messages
|
||||||
|
|
||||||
|
## New Components
|
||||||
|
|
||||||
|
### 1. Message Interface (`lib/types.ts`)
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
export interface Message {
|
||||||
|
id: string;
|
||||||
|
sessionId: string;
|
||||||
|
timestamp: Date;
|
||||||
|
role: string; // "User", "Assistant", "System", etc.
|
||||||
|
content: string;
|
||||||
|
order: number; // Order within the conversation (0, 1, 2, ...)
|
||||||
|
createdAt: Date;
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### 2. Transcript Parser (`lib/transcriptParser.js`)
|
||||||
|
|
||||||
|
- **`parseChatLogToJSON(logString)`** - Parses raw transcript text into structured messages
|
||||||
|
- **`storeMessagesForSession(sessionId, messages)`** - Stores parsed messages in database
|
||||||
|
- **`processTranscriptForSession(sessionId, transcriptContent)`** - Complete processing for one session
|
||||||
|
- **`processAllUnparsedTranscripts()`** - Batch process all unparsed transcripts
|
||||||
|
- **`getMessagesForSession(sessionId)`** - Retrieve messages for a session
|
||||||
|
|
||||||
|
### 3. MessageViewer Component (`components/MessageViewer.tsx`)
|
||||||
|
|
||||||
|
- Chat-like interface for displaying parsed messages
|
||||||
|
- Color-coded by role (User: blue, Assistant: gray, System: yellow)
|
||||||
|
- Shows timestamps and message order
|
||||||
|
- Scrollable with conversation metadata
|
||||||
|
|
||||||
|
## Updated Components
|
||||||
|
|
||||||
|
### 1. Session API (`pages/api/dashboard/session/[id].ts`)
|
||||||
|
|
||||||
|
- Now includes parsed messages in session response
|
||||||
|
- Messages are ordered by `order` field (ascending)
|
||||||
|
|
||||||
|
### 2. Session Details Page (`app/dashboard/sessions/[id]/page.tsx`)
|
||||||
|
|
||||||
|
- Added MessageViewer component
|
||||||
|
- Shows both parsed messages AND raw transcript
|
||||||
|
- Prioritizes parsed messages when available
|
||||||
|
|
||||||
|
### 3. ChatSession Interface (`lib/types.ts`)
|
||||||
|
|
||||||
|
- Added optional `messages?: Message[]` field
|
||||||
|
|
||||||
|
## Parsing Logic
|
||||||
|
|
||||||
|
### Supported Format
|
||||||
|
|
||||||
|
The parser expects transcript format:
|
||||||
|
|
||||||
|
```
|
||||||
|
[DD.MM.YYYY HH:MM:SS] Role: Message content
|
||||||
|
[DD.MM.YYYY HH:MM:SS] User: Hello, I need help
|
||||||
|
[DD.MM.YYYY HH:MM:SS] Assistant: How can I help you today?
|
||||||
|
```
|
||||||
|
|
||||||
|
### Features
|
||||||
|
|
||||||
|
- **Multi-line support** - Messages can span multiple lines
|
||||||
|
- **Timestamp parsing** - Converts DD.MM.YYYY HH:MM:SS to ISO format
|
||||||
|
- **Role detection** - Extracts sender role from each message
|
||||||
|
- **Ordering** - Maintains conversation order with explicit order field
|
||||||
|
- **Sorting** - Messages sorted by timestamp, then by role (User before Assistant)
|
||||||
|
|
||||||
|
## Manual Commands

### New Commands Added

```bash
# Parse transcripts into structured messages
node scripts/manual-triggers.js parse

# Complete workflow: refresh → parse → process
node scripts/manual-triggers.js all

# Check status (now shows parsing info)
node scripts/manual-triggers.js status
```

### Updated Commands

- **`status`** - Now shows transcript and parsing statistics
- **`all`** - New command that runs refresh → parse → process in sequence

## Workflow Integration

### Complete Processing Pipeline

1. **Session Refresh** - Fetch sessions from CSV, download transcripts
2. **Transcript Parsing** - Parse raw transcripts into structured messages
3. **AI Processing** - Process sessions with OpenAI for sentiment, categories, etc.

### Database States

```javascript
// After CSV fetch
{
  transcriptContent: "raw text...",
  messages: [], // Empty
  processed: null
}

// After parsing
{
  transcriptContent: "raw text...",
  messages: [Message, Message, ...], // Parsed
  processed: null
}

// After AI processing
{
  transcriptContent: "raw text...",
  messages: [Message, Message, ...], // Parsed
  processed: true,
  sentimentCategory: "positive",
  summary: "Brief summary...",
  // ... other AI fields
}
```

## User Experience Improvements

### Before

- Only raw transcript text in a text area
- Difficult to follow conversation flow
- No clear distinction between speakers

### After

- **Chat-like interface** with message bubbles
- **Color-coded roles** for easy identification
- **Timestamps** for each message
- **Conversation metadata** (first/last message times)
- **Fallback to raw transcript** if parsing fails
- **Both views available** - structured AND raw

## Testing

### Manual Testing Commands

```bash
# Check current status
node scripts/manual-triggers.js status

# Parse existing transcripts
node scripts/manual-triggers.js parse

# Full pipeline test
node scripts/manual-triggers.js all
```

### Expected Results

1. Sessions with transcript content get parsed into individual messages
2. Session detail pages show a chat-like interface
3. Both parsed messages and the raw transcript are available
4. No data loss - original transcript content is preserved

## Technical Benefits

### Performance

- **Indexed queries** - Messages indexed by sessionId and order
- **Efficient loading** - Only load messages when needed
- **Cascading deletes** - Messages automatically deleted with sessions
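
A sketch of how the new table is typically consumed, assuming a Prisma `message` model whose fields match the Message interface above and the `messages` relation on Session; the exact query shapes are illustrative, not taken from the codebase.

```typescript
// Hypothetical queries against the new Message table (model and field names assumed
// to match the Message interface and the Session.messages relation described above).
import { PrismaClient } from "@prisma/client";

const prisma = new PrismaClient();

// Load a session together with its parsed messages, ordered by the `order` field;
// the index on (sessionId, order) keeps this lookup cheap.
async function loadSessionWithMessages(sessionId: string) {
  return prisma.session.findUnique({
    where: { id: sessionId },
    include: { messages: { orderBy: { order: "asc" } } },
  });
}

// Deleting a session removes its messages too, via the cascading foreign key.
async function deleteSession(sessionId: string) {
  await prisma.session.delete({ where: { id: sessionId } });
}
```
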
### Maintainability

- **Separation of concerns** - Parsing logic isolated in dedicated module
- **Type safety** - Full TypeScript support for Message interface
- **Error handling** - Graceful fallbacks when parsing fails

### Extensibility

- **Role flexibility** - Supports any role names (User, Assistant, System, etc.)
- **Content preservation** - Multi-line messages fully supported
- **Metadata ready** - Easy to add message-level metadata in future

## Migration Notes

### Existing Data

- **No data loss** - Original transcript content preserved
- **Backward compatibility** - Pages work with or without parsed messages
- **Gradual migration** - Can parse transcripts incrementally

### Database Migration

- New Message table created with foreign key constraints
- Existing Session table unchanged (only added relation)
- Index created for efficient message queries

This implementation provides a solid foundation for enhanced conversation analysis and user experience while maintaining full backward compatibility.

88
fix-import-status.ts
Normal file
@@ -0,0 +1,88 @@
import { PrismaClient, ProcessingStage, ProcessingStatus } from '@prisma/client';
import { ProcessingStatusManager } from './lib/processingStatusManager';

const prisma = new PrismaClient();

async function fixProcessingStatus() {
  try {
    console.log('=== FIXING PROCESSING STATUS (REFACTORED SYSTEM) ===\n');

    // Check for any failed processing stages that might need retry
    const failedSessions = await ProcessingStatusManager.getFailedSessions();

    console.log(`Found ${failedSessions.length} failed processing stages`);

    if (failedSessions.length > 0) {
      console.log('\nFailed sessions by stage:');
      const failuresByStage: Record<string, number> = {};

      failedSessions.forEach(failure => {
        failuresByStage[failure.stage] = (failuresByStage[failure.stage] || 0) + 1;
      });

      Object.entries(failuresByStage).forEach(([stage, count]) => {
        console.log(`  ${stage}: ${count} failures`);
      });

      // Show sample failed sessions
      console.log('\nSample failed sessions:');
      failedSessions.slice(0, 5).forEach(failure => {
        console.log(`  ${failure.session.import?.externalSessionId || failure.sessionId}: ${failure.stage} - ${failure.errorMessage}`);
      });

      // Ask if user wants to reset failed stages for retry
      console.log('\nTo reset failed stages for retry, you can use:');
      console.log('ProcessingStatusManager.resetStageForRetry(sessionId, stage)');
    }

    // Check for sessions that might be stuck in IN_PROGRESS
    const stuckSessions = await prisma.sessionProcessingStatus.findMany({
      where: {
        status: ProcessingStatus.IN_PROGRESS,
        startedAt: {
          lt: new Date(Date.now() - 30 * 60 * 1000) // Started more than 30 minutes ago
        }
      },
      include: {
        session: {
          include: {
            import: true
          }
        }
      }
    });

    if (stuckSessions.length > 0) {
      console.log(`\nFound ${stuckSessions.length} sessions stuck in IN_PROGRESS state:`);
      stuckSessions.forEach(stuck => {
        console.log(`  ${stuck.session.import?.externalSessionId || stuck.sessionId}: ${stuck.stage} (started: ${stuck.startedAt})`);
      });

      console.log('\nThese sessions may need to be reset to PENDING status for retry.');
    }

    // Show current pipeline status
    console.log('\n=== CURRENT PIPELINE STATUS ===');
    const pipelineStatus = await ProcessingStatusManager.getPipelineStatus();

    const stages = ['CSV_IMPORT', 'TRANSCRIPT_FETCH', 'SESSION_CREATION', 'AI_ANALYSIS', 'QUESTION_EXTRACTION'];

    for (const stage of stages) {
      const stageData = pipelineStatus.pipeline[stage] || {};
      const pending = stageData.PENDING || 0;
      const inProgress = stageData.IN_PROGRESS || 0;
      const completed = stageData.COMPLETED || 0;
      const failed = stageData.FAILED || 0;
      const skipped = stageData.SKIPPED || 0;

      console.log(`${stage}: ${completed} completed, ${pending} pending, ${inProgress} in progress, ${failed} failed, ${skipped} skipped`);
    }

  } catch (error) {
    console.error('Error fixing processing status:', error);
  } finally {
    await prisma.$disconnect();
  }
}

fixProcessingStatus();
@ -1,440 +1,41 @@
|
|||||||
// Fetches, parses, and returns chat session data for a company from a CSV URL
|
// Simplified CSV fetcher - fetches and parses CSV data without any processing
|
||||||
|
// Maps directly to SessionImport table fields
|
||||||
import fetch from "node-fetch";
|
import fetch from "node-fetch";
|
||||||
import { parse } from "csv-parse/sync";
|
import { parse } from "csv-parse/sync";
|
||||||
import ISO6391 from "iso-639-1";
|
|
||||||
import countries from "i18n-iso-countries";
|
|
||||||
|
|
||||||
// Register locales for i18n-iso-countries
|
// Raw CSV data interface matching SessionImport schema
|
||||||
import enLocale from "i18n-iso-countries/langs/en.json" with { type: "json" };
|
interface RawSessionImport {
|
||||||
countries.registerLocale(enLocale);
|
externalSessionId: string;
|
||||||
|
startTimeRaw: string;
|
||||||
// This type is used internally for parsing the CSV records
|
endTimeRaw: string;
|
||||||
interface CSVRecord {
|
ipAddress: string | null;
|
||||||
session_id: string;
|
countryCode: string | null;
|
||||||
start_time: string;
|
language: string | null;
|
||||||
end_time?: string;
|
messagesSent: number | null;
|
||||||
ip_address?: string;
|
sentimentRaw: string | null;
|
||||||
country?: string;
|
escalatedRaw: string | null;
|
||||||
language?: string;
|
forwardedHrRaw: string | null;
|
||||||
messages_sent?: string;
|
fullTranscriptUrl: string | null;
|
||||||
sentiment?: string;
|
avgResponseTimeSeconds: number | null;
|
||||||
escalated?: string;
|
tokens: number | null;
|
||||||
forwarded_hr?: string;
|
tokensEur: number | null;
|
||||||
full_transcript_url?: string;
|
category: string | null;
|
||||||
avg_response_time?: string;
|
initialMessage: string | null;
|
||||||
tokens?: string;
|
|
||||||
tokens_eur?: string;
|
|
||||||
category?: string;
|
|
||||||
initial_msg?: string;
|
|
||||||
[key: string]: string | undefined;
|
|
||||||
}
|
|
||||||
|
|
||||||
interface SessionData {
|
|
||||||
id: string;
|
|
||||||
sessionId: string;
|
|
||||||
startTime: Date;
|
|
||||||
endTime: Date | null;
|
|
||||||
ipAddress?: string;
|
|
||||||
country?: string | null; // Will store ISO 3166-1 alpha-2 country code or null/undefined
|
|
||||||
language?: string | null; // Will store ISO 639-1 language code or null/undefined
|
|
||||||
messagesSent: number;
|
|
||||||
sentiment: number | null;
|
|
||||||
escalated: boolean;
|
|
||||||
forwardedHr: boolean;
|
|
||||||
fullTranscriptUrl?: string | null;
|
|
||||||
avgResponseTime: number | null;
|
|
||||||
tokens: number;
|
|
||||||
tokensEur: number;
|
|
||||||
category?: string | null;
|
|
||||||
initialMsg?: string;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Converts country names to ISO 3166-1 alpha-2 codes
|
* Fetches and parses CSV data from a URL without any processing
|
||||||
* @param countryStr Raw country string from CSV
|
* Maps CSV columns by position to SessionImport fields
|
||||||
* @returns ISO 3166-1 alpha-2 country code or null if not found
|
* @param url The CSV URL
|
||||||
|
* @param username Optional username for authentication
|
||||||
|
* @param password Optional password for authentication
|
||||||
|
* @returns Array of raw session import data
|
||||||
*/
|
*/
|
||||||
function getCountryCode(countryStr?: string): string | null | undefined {
|
|
||||||
if (countryStr === undefined) return undefined;
|
|
||||||
if (countryStr === null || countryStr === "") return null;
|
|
||||||
|
|
||||||
// Clean the input
|
|
||||||
const normalized = countryStr.trim();
|
|
||||||
if (!normalized) return null;
|
|
||||||
|
|
||||||
// Direct ISO code check (if already a 2-letter code)
|
|
||||||
if (normalized.length === 2 && normalized === normalized.toUpperCase()) {
|
|
||||||
return countries.isValid(normalized) ? normalized : null;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Special case for country codes used in the dataset
|
|
||||||
const countryMapping: Record<string, string> = {
|
|
||||||
BA: "BA", // Bosnia and Herzegovina
|
|
||||||
NL: "NL", // Netherlands
|
|
||||||
USA: "US", // United States
|
|
||||||
UK: "GB", // United Kingdom
|
|
||||||
GB: "GB", // Great Britain
|
|
||||||
Nederland: "NL",
|
|
||||||
Netherlands: "NL",
|
|
||||||
Netherland: "NL",
|
|
||||||
Holland: "NL",
|
|
||||||
Germany: "DE",
|
|
||||||
Deutschland: "DE",
|
|
||||||
Belgium: "BE",
|
|
||||||
België: "BE",
|
|
||||||
Belgique: "BE",
|
|
||||||
France: "FR",
|
|
||||||
Frankreich: "FR",
|
|
||||||
"United States": "US",
|
|
||||||
"United States of America": "US",
|
|
||||||
Bosnia: "BA",
|
|
||||||
"Bosnia and Herzegovina": "BA",
|
|
||||||
"Bosnia & Herzegovina": "BA",
|
|
||||||
};
|
|
||||||
|
|
||||||
// Check mapping
|
|
||||||
if (normalized in countryMapping) {
|
|
||||||
return countryMapping[normalized];
|
|
||||||
}
|
|
||||||
|
|
||||||
// Try to get the code from the country name (in English)
|
|
||||||
try {
|
|
||||||
const code = countries.getAlpha2Code(normalized, "en");
|
|
||||||
if (code) return code;
|
|
||||||
} catch (error) {
|
|
||||||
process.stderr.write(
|
|
||||||
`[CSV] Error converting country name to code: ${normalized} - ${error}\n`
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
// If all else fails, return null
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Converts language names to ISO 639-1 codes
|
|
||||||
* @param languageStr Raw language string from CSV
|
|
||||||
* @returns ISO 639-1 language code or null if not found
|
|
||||||
*/
|
|
||||||
function getLanguageCode(languageStr?: string): string | null | undefined {
|
|
||||||
if (languageStr === undefined) return undefined;
|
|
||||||
if (languageStr === null || languageStr === "") return null;
|
|
||||||
|
|
||||||
// Clean the input
|
|
||||||
const normalized = languageStr.trim();
|
|
||||||
if (!normalized) return null;
|
|
||||||
|
|
||||||
// Direct ISO code check (if already a 2-letter code)
|
|
||||||
if (normalized.length === 2 && normalized === normalized.toLowerCase()) {
|
|
||||||
return ISO6391.validate(normalized) ? normalized : null;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Special case mappings
|
|
||||||
const languageMapping: Record<string, string> = {
|
|
||||||
english: "en",
|
|
||||||
English: "en",
|
|
||||||
dutch: "nl",
|
|
||||||
Dutch: "nl",
|
|
||||||
nederlands: "nl",
|
|
||||||
Nederlands: "nl",
|
|
||||||
nl: "nl",
|
|
||||||
bosnian: "bs",
|
|
||||||
Bosnian: "bs",
|
|
||||||
turkish: "tr",
|
|
||||||
Turkish: "tr",
|
|
||||||
german: "de",
|
|
||||||
German: "de",
|
|
||||||
deutsch: "de",
|
|
||||||
Deutsch: "de",
|
|
||||||
french: "fr",
|
|
||||||
French: "fr",
|
|
||||||
français: "fr",
|
|
||||||
Français: "fr",
|
|
||||||
spanish: "es",
|
|
||||||
Spanish: "es",
|
|
||||||
español: "es",
|
|
||||||
Español: "es",
|
|
||||||
italian: "it",
|
|
||||||
Italian: "it",
|
|
||||||
italiano: "it",
|
|
||||||
Italiano: "it",
|
|
||||||
nizozemski: "nl", // "Dutch" in some Slavic languages
|
|
||||||
};
|
|
||||||
|
|
||||||
// Check mapping
|
|
||||||
if (normalized in languageMapping) {
|
|
||||||
return languageMapping[normalized];
|
|
||||||
}
|
|
||||||
|
|
||||||
// Try to get code using the ISO6391 library
|
|
||||||
try {
|
|
||||||
const code = ISO6391.getCode(normalized);
|
|
||||||
if (code) return code;
|
|
||||||
} catch (error) {
|
|
||||||
process.stderr.write(
|
|
||||||
`[CSV] Error converting language name to code: ${normalized} - ${error}\n`
|
|
||||||
);
|
|
||||||
}
|
|
||||||
// If all else fails, return null
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Normalizes category values to standard groups
|
|
||||||
* @param categoryStr The raw category string from CSV
|
|
||||||
* @returns A normalized category string
|
|
||||||
*/
|
|
||||||
function normalizeCategory(categoryStr?: string): string | null {
|
|
||||||
if (!categoryStr) return null;
|
|
||||||
|
|
||||||
const normalized = categoryStr.toLowerCase().trim();
|
|
||||||
|
|
||||||
// Define category groups using keywords
|
|
||||||
const categoryMapping: Record<string, string[]> = {
|
|
||||||
Onboarding: [
|
|
||||||
"onboarding",
|
|
||||||
"start",
|
|
||||||
"begin",
|
|
||||||
"new",
|
|
||||||
"orientation",
|
|
||||||
"welcome",
|
|
||||||
"intro",
|
|
||||||
"getting started",
|
|
||||||
"documents",
|
|
||||||
"documenten",
|
|
||||||
"first day",
|
|
||||||
"eerste dag",
|
|
||||||
],
|
|
||||||
"General Information": [
|
|
||||||
"general",
|
|
||||||
"algemeen",
|
|
||||||
"info",
|
|
||||||
"information",
|
|
||||||
"informatie",
|
|
||||||
"question",
|
|
||||||
"vraag",
|
|
||||||
"inquiry",
|
|
||||||
"chat",
|
|
||||||
"conversation",
|
|
||||||
"gesprek",
|
|
||||||
"talk",
|
|
||||||
],
|
|
||||||
Greeting: [
|
|
||||||
"greeting",
|
|
||||||
"greet",
|
|
||||||
"hello",
|
|
||||||
"hi",
|
|
||||||
"hey",
|
|
||||||
"welcome",
|
|
||||||
"hallo",
|
|
||||||
"hoi",
|
|
||||||
"greetings",
|
|
||||||
],
|
|
||||||
"HR & Payroll": [
|
|
||||||
"salary",
|
|
||||||
"salaris",
|
|
||||||
"pay",
|
|
||||||
"payroll",
|
|
||||||
"loon",
|
|
||||||
"loonstrook",
|
|
||||||
"hr",
|
|
||||||
"human resources",
|
|
||||||
"benefits",
|
|
||||||
"vacation",
|
|
||||||
"leave",
|
|
||||||
"verlof",
|
|
||||||
"maaltijdvergoeding",
|
|
||||||
"vergoeding",
|
|
||||||
],
|
|
||||||
"Schedules & Hours": [
|
|
||||||
"schedule",
|
|
||||||
"hours",
|
|
||||||
"tijd",
|
|
||||||
"time",
|
|
||||||
"roster",
|
|
||||||
"rooster",
|
|
||||||
"planning",
|
|
||||||
"shift",
|
|
||||||
"dienst",
|
|
||||||
"working hours",
|
|
||||||
"werktijden",
|
|
||||||
"openingstijden",
|
|
||||||
],
|
|
||||||
"Role & Responsibilities": [
|
|
||||||
"role",
|
|
||||||
"job",
|
|
||||||
"function",
|
|
||||||
"functie",
|
|
||||||
"task",
|
|
||||||
"taak",
|
|
||||||
"responsibilities",
|
|
||||||
"leidinggevende",
|
|
||||||
"manager",
|
|
||||||
"teamleider",
|
|
||||||
"supervisor",
|
|
||||||
"team",
|
|
||||||
"lead",
|
|
||||||
],
|
|
||||||
"Technical Support": [
|
|
||||||
"technical",
|
|
||||||
"tech",
|
|
||||||
"support",
|
|
||||||
"laptop",
|
|
||||||
"computer",
|
|
||||||
"system",
|
|
||||||
"systeem",
|
|
||||||
"it",
|
|
||||||
"software",
|
|
||||||
"hardware",
|
|
||||||
],
|
|
||||||
Offboarding: [
|
|
||||||
"offboarding",
|
|
||||||
"leave",
|
|
||||||
"exit",
|
|
||||||
"quit",
|
|
||||||
"resign",
|
|
||||||
"resignation",
|
|
||||||
"ontslag",
|
|
||||||
"vertrek",
|
|
||||||
"afsluiting",
|
|
||||||
],
|
|
||||||
};
|
|
||||||
|
|
||||||
// Try to match the category using keywords
|
|
||||||
for (const [category, keywords] of Object.entries(categoryMapping)) {
|
|
||||||
if (keywords.some((keyword) => normalized.includes(keyword))) {
|
|
||||||
return category;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// If no match, return "Other"
|
|
||||||
return "Other";
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Converts sentiment string values to numeric scores
|
|
||||||
* @param sentimentStr The sentiment string from the CSV
|
|
||||||
* @returns A numeric score representing the sentiment
|
|
||||||
*/
|
|
||||||
function mapSentimentToScore(sentimentStr?: string): number | null {
|
|
||||||
if (!sentimentStr) return null;
|
|
||||||
|
|
||||||
// Convert to lowercase for case-insensitive matching
|
|
||||||
const sentiment = sentimentStr.toLowerCase();
|
|
||||||
|
|
||||||
// Map sentiment strings to numeric values on a scale from -1 to 2
|
|
||||||
const sentimentMap: Record<string, number> = {
|
|
||||||
happy: 1.0,
|
|
||||||
excited: 1.5,
|
|
||||||
positive: 0.8,
|
|
||||||
neutral: 0.0,
|
|
||||||
playful: 0.7,
|
|
||||||
negative: -0.8,
|
|
||||||
angry: -1.0,
|
|
||||||
sad: -0.7,
|
|
||||||
frustrated: -0.9,
|
|
||||||
positief: 0.8, // Dutch
|
|
||||||
neutraal: 0.0, // Dutch
|
|
||||||
negatief: -0.8, // Dutch
|
|
||||||
positivo: 0.8, // Spanish/Italian
|
|
||||||
neutro: 0.0, // Spanish/Italian
|
|
||||||
negativo: -0.8, // Spanish/Italian
|
|
||||||
yes: 0.5, // For any "yes" sentiment
|
|
||||||
no: -0.5, // For any "no" sentiment
|
|
||||||
};
|
|
||||||
|
|
||||||
return sentimentMap[sentiment] !== undefined
|
|
||||||
? sentimentMap[sentiment]
|
|
||||||
: isNaN(parseFloat(sentiment))
|
|
||||||
? null
|
|
||||||
: parseFloat(sentiment);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Checks if a string value should be considered as boolean true
|
|
||||||
* @param value The string value to check
|
|
||||||
* @returns True if the string indicates a positive/true value
|
|
||||||
*/
|
|
||||||
function isTruthyValue(value?: string): boolean {
|
|
||||||
if (!value) return false;
|
|
||||||
|
|
||||||
const truthyValues = [
|
|
||||||
"1",
|
|
||||||
"true",
|
|
||||||
"yes",
|
|
||||||
"y",
|
|
||||||
"ja",
|
|
||||||
"si",
|
|
||||||
"oui",
|
|
||||||
"да",
|
|
||||||
"да",
|
|
||||||
"はい",
|
|
||||||
];
|
|
||||||
|
|
||||||
return truthyValues.includes(value.toLowerCase());
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Safely parses a date string into a Date object.
|
|
||||||
* Handles potential errors and various formats, prioritizing D-M-YYYY HH:MM:SS.
|
|
||||||
* @param dateStr The date string to parse.
|
|
||||||
* @returns A Date object or null if parsing fails.
|
|
||||||
*/
|
|
||||||
function safeParseDate(dateStr?: string): Date | null {
|
|
||||||
if (!dateStr) return null;
|
|
||||||
|
|
||||||
// Try to parse D-M-YYYY HH:MM:SS format (with hyphens or dots)
|
|
||||||
const dateTimeRegex =
|
|
||||||
/^(\d{1,2})[.-](\d{1,2})[.-](\d{4}) (\d{1,2}):(\d{1,2}):(\d{1,2})$/;
|
|
||||||
const match = dateStr.match(dateTimeRegex);
|
|
||||||
|
|
||||||
if (match) {
|
|
||||||
const day = match[1];
|
|
||||||
const month = match[2];
|
|
||||||
const year = match[3];
|
|
||||||
const hour = match[4];
|
|
||||||
const minute = match[5];
|
|
||||||
const second = match[6];
|
|
||||||
|
|
||||||
// Reformat to YYYY-MM-DDTHH:MM:SS (ISO-like, but local time)
|
|
||||||
// Ensure month and day are two digits
|
|
||||||
const formattedDateStr = `${year}-${month.padStart(2, "0")}-${day.padStart(2, "0")}T${hour.padStart(2, "0")}:${minute.padStart(2, "0")}:${second.padStart(2, "0")}`;
|
|
||||||
|
|
||||||
try {
|
|
||||||
const date = new Date(formattedDateStr);
|
|
||||||
// Basic validation: check if the constructed date is valid
|
|
||||||
if (!isNaN(date.getTime())) {
|
|
||||||
// console.log(`[safeParseDate] Parsed from D-M-YYYY: ${dateStr} -> ${formattedDateStr} -> ${date.toISOString()}`);
|
|
||||||
return date;
|
|
||||||
}
|
|
||||||
} catch (e) {
|
|
||||||
console.warn(
|
|
||||||
`[safeParseDate] Error parsing reformatted string ${formattedDateStr} from ${dateStr}:`,
|
|
||||||
e
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Fallback for other potential formats (e.g., direct ISO 8601) or if the primary parse failed
|
|
||||||
try {
|
|
||||||
const parsedDate = new Date(dateStr);
|
|
||||||
if (!isNaN(parsedDate.getTime())) {
|
|
||||||
// console.log(`[safeParseDate] Parsed with fallback: ${dateStr} -> ${parsedDate.toISOString()}`);
|
|
||||||
return parsedDate;
|
|
||||||
}
|
|
||||||
} catch (e) {
|
|
||||||
console.warn(`[safeParseDate] Error parsing with fallback ${dateStr}:`, e);
|
|
||||||
}
|
|
||||||
|
|
||||||
console.warn(`Failed to parse date string: ${dateStr}`);
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function fetchAndParseCsv(
|
export async function fetchAndParseCsv(
|
||||||
url: string,
|
url: string,
|
||||||
username?: string,
|
username?: string,
|
||||||
password?: string
|
password?: string
|
||||||
): Promise<Partial<SessionData>[]> {
|
): Promise<RawSessionImport[]> {
|
||||||
const authHeader =
|
const authHeader =
|
||||||
username && password
|
username && password
|
||||||
? "Basic " + Buffer.from(`${username}:${password}`).toString("base64")
|
? "Basic " + Buffer.from(`${username}:${password}`).toString("base64")
|
||||||
@ -443,56 +44,39 @@ export async function fetchAndParseCsv(
|
|||||||
const res = await fetch(url, {
|
const res = await fetch(url, {
|
||||||
headers: authHeader ? { Authorization: authHeader } : {},
|
headers: authHeader ? { Authorization: authHeader } : {},
|
||||||
});
|
});
|
||||||
if (!res.ok) throw new Error("Failed to fetch CSV: " + res.statusText);
|
|
||||||
|
if (!res.ok) {
|
||||||
|
throw new Error(`Failed to fetch CSV: ${res.status} ${res.statusText}`);
|
||||||
|
}
|
||||||
|
|
||||||
const text = await res.text();
|
const text = await res.text();
|
||||||
|
|
||||||
// Parse without expecting headers, using known order
|
// Parse CSV without headers, using positional column mapping
|
||||||
const records: CSVRecord[] = parse(text, {
|
const records: string[][] = parse(text, {
|
||||||
delimiter: ",",
|
delimiter: ",",
|
||||||
columns: [
|
from_line: 1, // Start from first line (no headers)
|
||||||
"session_id",
|
|
||||||
"start_time",
|
|
||||||
"end_time",
|
|
||||||
"ip_address",
|
|
||||||
"country",
|
|
||||||
"language",
|
|
||||||
"messages_sent",
|
|
||||||
"sentiment",
|
|
||||||
"escalated",
|
|
||||||
"forwarded_hr",
|
|
||||||
"full_transcript_url",
|
|
||||||
"avg_response_time",
|
|
||||||
"tokens",
|
|
||||||
"tokens_eur",
|
|
||||||
"category",
|
|
||||||
"initial_msg",
|
|
||||||
],
|
|
||||||
from_line: 1,
|
|
||||||
relax_column_count: true,
|
relax_column_count: true,
|
||||||
skip_empty_lines: true,
|
skip_empty_lines: true,
|
||||||
trim: true,
|
trim: true,
|
||||||
});
|
});
|
||||||
|
|
||||||
// Coerce types for relevant columns
|
// Map CSV columns by position to SessionImport fields
|
||||||
return records.map((r) => ({
|
return records.map((row) => ({
|
||||||
id: r.session_id,
|
externalSessionId: row[0] || "",
|
||||||
startTime: safeParseDate(r.start_time) || new Date(), // Fallback to current date if invalid
|
startTimeRaw: row[1] || "",
|
||||||
endTime: safeParseDate(r.end_time),
|
endTimeRaw: row[2] || "",
|
||||||
ipAddress: r.ip_address,
|
ipAddress: row[3] || null,
|
||||||
country: getCountryCode(r.country),
|
countryCode: row[4] || null,
|
||||||
language: getLanguageCode(r.language),
|
language: row[5] || null,
|
||||||
messagesSent: Number(r.messages_sent) || 0,
|
messagesSent: row[6] ? parseInt(row[6], 10) || null : null,
|
||||||
sentiment: mapSentimentToScore(r.sentiment),
|
sentimentRaw: row[7] || null,
|
||||||
escalated: isTruthyValue(r.escalated),
|
escalatedRaw: row[8] || null,
|
||||||
forwardedHr: isTruthyValue(r.forwarded_hr),
|
forwardedHrRaw: row[9] || null,
|
||||||
fullTranscriptUrl: r.full_transcript_url,
|
fullTranscriptUrl: row[10] || null,
|
||||||
avgResponseTime: r.avg_response_time
|
avgResponseTimeSeconds: row[11] ? parseFloat(row[11]) || null : null,
|
||||||
? parseFloat(r.avg_response_time)
|
tokens: row[12] ? parseInt(row[12], 10) || null : null,
|
||||||
: null,
|
tokensEur: row[13] ? parseFloat(row[13]) || null : null,
|
||||||
tokens: Number(r.tokens) || 0,
|
category: row[14] || null,
|
||||||
tokensEur: r.tokens_eur ? parseFloat(r.tokens_eur) : 0,
|
initialMessage: row[15] || null,
|
||||||
category: normalizeCategory(r.category),
|
|
||||||
initialMsg: r.initial_msg,
|
|
||||||
}));
|
}));
|
||||||
}
|
}
|
||||||
|
|||||||
147
lib/env.ts
Normal file
@@ -0,0 +1,147 @@
|
|||||||
|
// Centralized environment variable management
|
||||||
|
import { readFileSync } from "fs";
|
||||||
|
import { fileURLToPath } from "url";
|
||||||
|
import { dirname, join } from "path";
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Parse environment variable value by removing quotes, comments, and trimming whitespace
|
||||||
|
*/
|
||||||
|
function parseEnvValue(value: string | undefined): string {
|
||||||
|
if (!value) return '';
|
||||||
|
|
||||||
|
// Trim whitespace
|
||||||
|
let cleaned = value.trim();
|
||||||
|
|
||||||
|
// Remove inline comments (everything after #)
|
||||||
|
const commentIndex = cleaned.indexOf('#');
|
||||||
|
if (commentIndex !== -1) {
|
||||||
|
cleaned = cleaned.substring(0, commentIndex).trim();
|
||||||
|
}
|
||||||
|
|
||||||
|
// Remove surrounding quotes (both single and double)
|
||||||
|
if ((cleaned.startsWith('"') && cleaned.endsWith('"')) ||
|
||||||
|
(cleaned.startsWith("'") && cleaned.endsWith("'"))) {
|
||||||
|
cleaned = cleaned.slice(1, -1);
|
||||||
|
}
|
||||||
|
|
||||||
|
return cleaned;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Parse integer with fallback to default value
|
||||||
|
*/
|
||||||
|
function parseIntWithDefault(value: string | undefined, defaultValue: number): number {
|
||||||
|
const cleaned = parseEnvValue(value);
|
||||||
|
if (!cleaned) return defaultValue;
|
||||||
|
|
||||||
|
const parsed = parseInt(cleaned, 10);
|
||||||
|
return isNaN(parsed) ? defaultValue : parsed;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Load environment variables from .env.local
|
||||||
|
const __filename = fileURLToPath(import.meta.url);
|
||||||
|
const __dirname = dirname(__filename);
|
||||||
|
const envPath = join(__dirname, '..', '.env.local');
|
||||||
|
|
||||||
|
// Load .env.local if it exists
|
||||||
|
try {
|
||||||
|
const envFile = readFileSync(envPath, 'utf8');
|
||||||
|
const envVars = envFile.split('\n').filter(line => line.trim() && !line.startsWith('#'));
|
||||||
|
|
||||||
|
envVars.forEach(line => {
|
||||||
|
const [key, ...valueParts] = line.split('=');
|
||||||
|
if (key && valueParts.length > 0) {
|
||||||
|
const rawValue = valueParts.join('=');
|
||||||
|
const cleanedValue = parseEnvValue(rawValue);
|
||||||
|
if (!process.env[key.trim()]) {
|
||||||
|
process.env[key.trim()] = cleanedValue;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
// Silently fail if .env.local doesn't exist
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Typed environment variables with defaults
|
||||||
|
*/
|
||||||
|
export const env = {
|
||||||
|
// NextAuth
|
||||||
|
NEXTAUTH_URL: parseEnvValue(process.env.NEXTAUTH_URL) || 'http://localhost:3000',
|
||||||
|
NEXTAUTH_SECRET: parseEnvValue(process.env.NEXTAUTH_SECRET) || '',
|
||||||
|
NODE_ENV: parseEnvValue(process.env.NODE_ENV) || 'development',
|
||||||
|
|
||||||
|
// OpenAI
|
||||||
|
OPENAI_API_KEY: parseEnvValue(process.env.OPENAI_API_KEY) || '',
|
||||||
|
|
||||||
|
// Scheduler Configuration
|
||||||
|
SCHEDULER_ENABLED: parseEnvValue(process.env.SCHEDULER_ENABLED) === 'true',
|
||||||
|
CSV_IMPORT_INTERVAL: parseEnvValue(process.env.CSV_IMPORT_INTERVAL) || '*/15 * * * *',
|
||||||
|
IMPORT_PROCESSING_INTERVAL: parseEnvValue(process.env.IMPORT_PROCESSING_INTERVAL) || '*/5 * * * *',
|
||||||
|
IMPORT_PROCESSING_BATCH_SIZE: parseIntWithDefault(process.env.IMPORT_PROCESSING_BATCH_SIZE, 50),
|
||||||
|
SESSION_PROCESSING_INTERVAL: parseEnvValue(process.env.SESSION_PROCESSING_INTERVAL) || '0 * * * *',
|
||||||
|
SESSION_PROCESSING_BATCH_SIZE: parseIntWithDefault(process.env.SESSION_PROCESSING_BATCH_SIZE, 0),
|
||||||
|
SESSION_PROCESSING_CONCURRENCY: parseIntWithDefault(process.env.SESSION_PROCESSING_CONCURRENCY, 5),
|
||||||
|
|
||||||
|
// Server
|
||||||
|
PORT: parseIntWithDefault(process.env.PORT, 3000),
|
||||||
|
} as const;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Validate required environment variables
|
||||||
|
*/
|
||||||
|
export function validateEnv(): { valid: boolean; errors: string[] } {
|
||||||
|
const errors: string[] = [];
|
||||||
|
|
||||||
|
if (!env.NEXTAUTH_SECRET) {
|
||||||
|
errors.push('NEXTAUTH_SECRET is required');
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!env.OPENAI_API_KEY && env.NODE_ENV === 'production') {
|
||||||
|
errors.push('OPENAI_API_KEY is required in production');
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
valid: errors.length === 0,
|
||||||
|
errors,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get scheduler configuration from environment variables
|
||||||
|
*/
|
||||||
|
export function getSchedulerConfig() {
|
||||||
|
return {
|
||||||
|
enabled: env.SCHEDULER_ENABLED,
|
||||||
|
csvImport: {
|
||||||
|
interval: env.CSV_IMPORT_INTERVAL,
|
||||||
|
},
|
||||||
|
importProcessing: {
|
||||||
|
interval: env.IMPORT_PROCESSING_INTERVAL,
|
||||||
|
batchSize: env.IMPORT_PROCESSING_BATCH_SIZE,
|
||||||
|
},
|
||||||
|
sessionProcessing: {
|
||||||
|
interval: env.SESSION_PROCESSING_INTERVAL,
|
||||||
|
batchSize: env.SESSION_PROCESSING_BATCH_SIZE,
|
||||||
|
concurrency: env.SESSION_PROCESSING_CONCURRENCY,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Log environment configuration (safe for production)
|
||||||
|
*/
|
||||||
|
export function logEnvConfig(): void {
|
||||||
|
console.log('[Environment] Configuration:');
|
||||||
|
console.log(` NODE_ENV: ${env.NODE_ENV}`);
|
||||||
|
console.log(` NEXTAUTH_URL: ${env.NEXTAUTH_URL}`);
|
||||||
|
console.log(` SCHEDULER_ENABLED: ${env.SCHEDULER_ENABLED}`);
|
||||||
|
console.log(` PORT: ${env.PORT}`);
|
||||||
|
|
||||||
|
if (env.SCHEDULER_ENABLED) {
|
||||||
|
console.log(' Scheduler intervals:');
|
||||||
|
console.log(` CSV Import: ${env.CSV_IMPORT_INTERVAL}`);
|
||||||
|
console.log(` Import Processing: ${env.IMPORT_PROCESSING_INTERVAL}`);
|
||||||
|
console.log(` Session Processing: ${env.SESSION_PROCESSING_INTERVAL}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
357
lib/importProcessor.ts
Normal file
@@ -0,0 +1,357 @@
|
|||||||
|
// SessionImport to Session processor
|
||||||
|
import { PrismaClient, SentimentCategory, SessionCategory, ProcessingStage } from "@prisma/client";
|
||||||
|
import { getSchedulerConfig } from "./env";
|
||||||
|
import { fetchTranscriptContent, isValidTranscriptUrl } from "./transcriptFetcher";
|
||||||
|
import { ProcessingStatusManager } from "./processingStatusManager";
|
||||||
|
import cron from "node-cron";
|
||||||
|
|
||||||
|
const prisma = new PrismaClient();
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Parse European date format (DD.MM.YYYY HH:mm:ss) to JavaScript Date
|
||||||
|
*/
|
||||||
|
function parseEuropeanDate(dateStr: string): Date {
|
||||||
|
if (!dateStr || typeof dateStr !== 'string') {
|
||||||
|
throw new Error(`Invalid date string: ${dateStr}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handle format: "DD.MM.YYYY HH:mm:ss"
|
||||||
|
const [datePart, timePart] = dateStr.trim().split(' ');
|
||||||
|
|
||||||
|
if (!datePart || !timePart) {
|
||||||
|
throw new Error(`Invalid date format: ${dateStr}. Expected format: DD.MM.YYYY HH:mm:ss`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const [day, month, year] = datePart.split('.');
|
||||||
|
|
||||||
|
if (!day || !month || !year) {
|
||||||
|
throw new Error(`Invalid date part: ${datePart}. Expected format: DD.MM.YYYY`);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Convert to ISO format: YYYY-MM-DD HH:mm:ss
|
||||||
|
const isoDateStr = `${year}-${month.padStart(2, '0')}-${day.padStart(2, '0')} ${timePart}`;
|
||||||
|
const date = new Date(isoDateStr);
|
||||||
|
|
||||||
|
if (isNaN(date.getTime())) {
|
||||||
|
throw new Error(`Failed to parse date: ${dateStr} -> ${isoDateStr}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
return date;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Helper function to parse sentiment from raw string (fallback only)
|
||||||
|
*/
|
||||||
|
function parseFallbackSentiment(sentimentRaw: string | null): SentimentCategory | null {
|
||||||
|
if (!sentimentRaw) return null;
|
||||||
|
|
||||||
|
const sentimentStr = sentimentRaw.toLowerCase();
|
||||||
|
if (sentimentStr.includes('positive')) {
|
||||||
|
return SentimentCategory.POSITIVE;
|
||||||
|
} else if (sentimentStr.includes('negative')) {
|
||||||
|
return SentimentCategory.NEGATIVE;
|
||||||
|
} else {
|
||||||
|
return SentimentCategory.NEUTRAL;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Helper function to parse boolean from raw string (fallback only)
|
||||||
|
*/
|
||||||
|
function parseFallbackBoolean(rawValue: string | null): boolean | null {
|
||||||
|
if (!rawValue) return null;
|
||||||
|
return ['true', '1', 'yes', 'escalated', 'forwarded'].includes(rawValue.toLowerCase());
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Parse transcript content into Message records
|
||||||
|
*/
|
||||||
|
async function parseTranscriptIntoMessages(sessionId: string, transcriptContent: string): Promise<void> {
|
||||||
|
// Clear existing messages for this session
|
||||||
|
await prisma.message.deleteMany({
|
||||||
|
where: { sessionId }
|
||||||
|
});
|
||||||
|
|
||||||
|
// Split transcript into lines and parse each message
|
||||||
|
const lines = transcriptContent.split('\n').filter(line => line.trim());
|
||||||
|
let order = 0;
|
||||||
|
|
||||||
|
for (const line of lines) {
|
||||||
|
const trimmedLine = line.trim();
|
||||||
|
if (!trimmedLine) continue;
|
||||||
|
|
||||||
|
// Try to parse different formats:
|
||||||
|
// Format 1: "User: message" or "Assistant: message"
|
||||||
|
// Format 2: "[timestamp] User: message" or "[timestamp] Assistant: message"
|
||||||
|
|
||||||
|
let role = 'unknown';
|
||||||
|
let content = trimmedLine;
|
||||||
|
let timestamp: Date | null = null;
|
||||||
|
|
||||||
|
// Check for timestamp format: [DD.MM.YYYY HH:mm:ss] Role: content
|
||||||
|
const timestampMatch = trimmedLine.match(/^\[([^\]]+)\]\s*(.+)$/);
|
||||||
|
if (timestampMatch) {
|
||||||
|
try {
|
||||||
|
timestamp = parseEuropeanDate(timestampMatch[1]);
|
||||||
|
content = timestampMatch[2];
|
||||||
|
} catch (error) {
|
||||||
|
// If timestamp parsing fails, treat the whole line as content
|
||||||
|
content = trimmedLine;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Extract role and message content
|
||||||
|
const roleMatch = content.match(/^(User|Assistant|System):\s*(.*)$/i);
|
||||||
|
if (roleMatch) {
|
||||||
|
role = roleMatch[1].toLowerCase();
|
||||||
|
content = roleMatch[2].trim();
|
||||||
|
} else {
|
||||||
|
// If no role prefix found, try to infer from context or use 'unknown'
|
||||||
|
role = 'unknown';
|
||||||
|
}
|
||||||
|
|
||||||
|
// Skip empty content
|
||||||
|
if (!content) continue;
|
||||||
|
|
||||||
|
// Create message record
|
||||||
|
await prisma.message.create({
|
||||||
|
data: {
|
||||||
|
sessionId,
|
||||||
|
timestamp,
|
||||||
|
role,
|
||||||
|
content,
|
||||||
|
order,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
order++;
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log(`[Import Processor] ✓ Parsed ${order} messages for session ${sessionId}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Process a single SessionImport record into a Session record
|
||||||
|
* Uses new unified processing status tracking
|
||||||
|
*/
|
||||||
|
async function processSingleImport(importRecord: any): Promise<{ success: boolean; error?: string }> {
|
||||||
|
let sessionId: string | null = null;
|
||||||
|
|
||||||
|
try {
|
||||||
|
// Parse dates using European format parser
|
||||||
|
const startTime = parseEuropeanDate(importRecord.startTimeRaw);
|
||||||
|
const endTime = parseEuropeanDate(importRecord.endTimeRaw);
|
||||||
|
|
||||||
|
console.log(`[Import Processor] Processing ${importRecord.externalSessionId}: ${startTime.toISOString()} - ${endTime.toISOString()}`);
|
||||||
|
|
||||||
|
// Create or update Session record with MINIMAL processing
|
||||||
|
const session = await prisma.session.upsert({
|
||||||
|
where: {
|
||||||
|
importId: importRecord.id,
|
||||||
|
},
|
||||||
|
update: {
|
||||||
|
startTime,
|
||||||
|
endTime,
|
||||||
|
// Direct copies (minimal processing)
|
||||||
|
ipAddress: importRecord.ipAddress,
|
||||||
|
country: importRecord.countryCode, // Keep as country code
|
||||||
|
fullTranscriptUrl: importRecord.fullTranscriptUrl,
|
||||||
|
avgResponseTime: importRecord.avgResponseTimeSeconds,
|
||||||
|
initialMsg: importRecord.initialMessage,
|
||||||
|
},
|
||||||
|
create: {
|
||||||
|
companyId: importRecord.companyId,
|
||||||
|
importId: importRecord.id,
|
||||||
|
startTime,
|
||||||
|
endTime,
|
||||||
|
// Direct copies (minimal processing)
|
||||||
|
ipAddress: importRecord.ipAddress,
|
||||||
|
country: importRecord.countryCode, // Keep as country code
|
||||||
|
fullTranscriptUrl: importRecord.fullTranscriptUrl,
|
||||||
|
avgResponseTime: importRecord.avgResponseTimeSeconds,
|
||||||
|
initialMsg: importRecord.initialMessage,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
sessionId = session.id;
|
||||||
|
|
||||||
|
// Initialize processing status for this session
|
||||||
|
await ProcessingStatusManager.initializeSession(sessionId);
|
||||||
|
|
||||||
|
// Mark CSV_IMPORT as completed
|
||||||
|
await ProcessingStatusManager.completeStage(sessionId, ProcessingStage.CSV_IMPORT);
|
||||||
|
|
||||||
|
// Handle transcript fetching
|
||||||
|
let transcriptContent = importRecord.rawTranscriptContent;
|
||||||
|
|
||||||
|
if (!transcriptContent && importRecord.fullTranscriptUrl && isValidTranscriptUrl(importRecord.fullTranscriptUrl)) {
|
||||||
|
await ProcessingStatusManager.startStage(sessionId, ProcessingStage.TRANSCRIPT_FETCH);
|
||||||
|
|
||||||
|
console.log(`[Import Processor] Fetching transcript for ${importRecord.externalSessionId}...`);
|
||||||
|
|
||||||
|
// Get company credentials for transcript fetching
|
||||||
|
const company = await prisma.company.findUnique({
|
||||||
|
where: { id: importRecord.companyId },
|
||||||
|
select: { csvUsername: true, csvPassword: true },
|
||||||
|
});
|
||||||
|
|
||||||
|
const transcriptResult = await fetchTranscriptContent(
|
||||||
|
importRecord.fullTranscriptUrl,
|
||||||
|
company?.csvUsername || undefined,
|
||||||
|
company?.csvPassword || undefined
|
||||||
|
);
|
||||||
|
|
||||||
|
if (transcriptResult.success) {
|
||||||
|
transcriptContent = transcriptResult.content;
|
||||||
|
console.log(`[Import Processor] ✓ Fetched transcript for ${importRecord.externalSessionId} (${transcriptContent?.length} chars)`);
|
||||||
|
|
||||||
|
// Update the import record with the fetched content
|
||||||
|
await prisma.sessionImport.update({
|
||||||
|
where: { id: importRecord.id },
|
||||||
|
data: { rawTranscriptContent: transcriptContent },
|
||||||
|
});
|
||||||
|
|
||||||
|
await ProcessingStatusManager.completeStage(sessionId, ProcessingStage.TRANSCRIPT_FETCH, {
|
||||||
|
contentLength: transcriptContent?.length || 0,
|
||||||
|
url: importRecord.fullTranscriptUrl
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
console.log(`[Import Processor] ⚠️ Failed to fetch transcript for ${importRecord.externalSessionId}: ${transcriptResult.error}`);
|
||||||
|
await ProcessingStatusManager.failStage(sessionId, ProcessingStage.TRANSCRIPT_FETCH, transcriptResult.error || 'Unknown error');
|
||||||
|
}
|
||||||
|
} else if (!importRecord.fullTranscriptUrl) {
|
||||||
|
// No transcript URL available - skip this stage
|
||||||
|
await ProcessingStatusManager.skipStage(sessionId, ProcessingStage.TRANSCRIPT_FETCH, 'No transcript URL provided');
|
||||||
|
} else {
|
||||||
|
// Transcript already fetched
|
||||||
|
await ProcessingStatusManager.completeStage(sessionId, ProcessingStage.TRANSCRIPT_FETCH, {
|
||||||
|
contentLength: transcriptContent?.length || 0,
|
||||||
|
source: 'already_fetched'
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handle session creation (parse messages)
|
||||||
|
await ProcessingStatusManager.startStage(sessionId, ProcessingStage.SESSION_CREATION);
|
||||||
|
|
||||||
|
if (transcriptContent) {
|
||||||
|
await parseTranscriptIntoMessages(sessionId, transcriptContent);
|
||||||
|
}
|
||||||
|
|
||||||
|
await ProcessingStatusManager.completeStage(sessionId, ProcessingStage.SESSION_CREATION, {
|
||||||
|
hasTranscript: !!transcriptContent,
|
||||||
|
transcriptLength: transcriptContent?.length || 0
|
||||||
|
});
|
||||||
|
|
||||||
|
return { success: true };
|
||||||
|
} catch (error) {
|
||||||
|
const errorMessage = error instanceof Error ? error.message : String(error);
|
||||||
|
|
||||||
|
// Mark the current stage as failed if we have a sessionId
|
||||||
|
if (sessionId) {
|
||||||
|
// Determine which stage failed based on the error
|
||||||
|
if (errorMessage.includes('transcript') || errorMessage.includes('fetch')) {
|
||||||
|
await ProcessingStatusManager.failStage(sessionId, ProcessingStage.TRANSCRIPT_FETCH, errorMessage);
|
||||||
|
} else if (errorMessage.includes('message') || errorMessage.includes('parse')) {
|
||||||
|
await ProcessingStatusManager.failStage(sessionId, ProcessingStage.SESSION_CREATION, errorMessage);
|
||||||
|
} else {
|
||||||
|
// General failure - mark CSV_IMPORT as failed
|
||||||
|
await ProcessingStatusManager.failStage(sessionId, ProcessingStage.CSV_IMPORT, errorMessage);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
success: false,
|
||||||
|
error: errorMessage,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Process unprocessed SessionImport records into Session records
|
||||||
|
* Uses new processing status system to find imports that need processing
|
||||||
|
*/
|
||||||
|
export async function processQueuedImports(batchSize: number = 50): Promise<void> {
|
||||||
|
console.log('[Import Processor] Starting to process unprocessed imports...');
|
||||||
|
|
||||||
|
let totalSuccessCount = 0;
|
||||||
|
let totalErrorCount = 0;
|
||||||
|
let batchNumber = 1;
|
||||||
|
|
||||||
|
while (true) {
|
||||||
|
// Find SessionImports that don't have a corresponding Session yet
|
||||||
|
const unprocessedImports = await prisma.sessionImport.findMany({
|
||||||
|
where: {
|
||||||
|
session: null, // No session created yet
|
||||||
|
},
|
||||||
|
take: batchSize,
|
||||||
|
orderBy: {
|
||||||
|
createdAt: 'asc', // Process oldest first
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
if (unprocessedImports.length === 0) {
|
||||||
|
if (batchNumber === 1) {
|
||||||
|
console.log('[Import Processor] No unprocessed imports found');
|
||||||
|
} else {
|
||||||
|
console.log(`[Import Processor] All batches completed. Total: ${totalSuccessCount} successful, ${totalErrorCount} failed`);
|
||||||
|
}
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log(`[Import Processor] Processing batch ${batchNumber}: ${unprocessedImports.length} imports...`);
|
||||||
|
|
||||||
|
let batchSuccessCount = 0;
|
||||||
|
let batchErrorCount = 0;
|
||||||
|
|
||||||
|
// Process each import in this batch
|
||||||
|
for (const importRecord of unprocessedImports) {
|
||||||
|
const result = await processSingleImport(importRecord);
|
||||||
|
|
||||||
|
if (result.success) {
|
||||||
|
batchSuccessCount++;
|
||||||
|
totalSuccessCount++;
|
||||||
|
console.log(`[Import Processor] ✓ Processed import ${importRecord.externalSessionId}`);
|
||||||
|
} else {
|
||||||
|
batchErrorCount++;
|
||||||
|
totalErrorCount++;
|
||||||
|
console.log(`[Import Processor] ✗ Failed to process import ${importRecord.externalSessionId}: ${result.error}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log(`[Import Processor] Batch ${batchNumber} completed: ${batchSuccessCount} successful, ${batchErrorCount} failed`);
|
||||||
|
batchNumber++;
|
||||||
|
|
||||||
|
// If this batch was smaller than the batch size, we're done
|
||||||
|
if (unprocessedImports.length < batchSize) {
|
||||||
|
console.log(`[Import Processor] All batches completed. Total: ${totalSuccessCount} successful, ${totalErrorCount} failed`);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Start the import processing scheduler
|
||||||
|
*/
|
||||||
|
export function startImportProcessingScheduler(): void {
|
||||||
|
const config = getSchedulerConfig();
|
||||||
|
|
||||||
|
if (!config.enabled) {
|
||||||
|
console.log('[Import Processing Scheduler] Disabled via configuration');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Use a more frequent interval for import processing (every 5 minutes by default)
|
||||||
|
const interval = process.env.IMPORT_PROCESSING_INTERVAL || '*/5 * * * *';
|
||||||
|
const batchSize = parseInt(process.env.IMPORT_PROCESSING_BATCH_SIZE || '50', 10);
|
||||||
|
|
||||||
|
console.log(`[Import Processing Scheduler] Starting with interval: ${interval}`);
|
||||||
|
console.log(`[Import Processing Scheduler] Batch size: ${batchSize}`);
|
||||||
|
|
||||||
|
cron.schedule(interval, async () => {
|
||||||
|
try {
|
||||||
|
await processQueuedImports(batchSize);
|
||||||
|
} catch (error) {
|
||||||
|
console.error(`[Import Processing Scheduler] Error: ${error}`);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
112
lib/metrics.ts
@@ -7,6 +7,7 @@ import {
|
|||||||
CountryMetrics, // Added CountryMetrics
|
CountryMetrics, // Added CountryMetrics
|
||||||
MetricsResult,
|
MetricsResult,
|
||||||
WordCloudWord, // Added WordCloudWord
|
WordCloudWord, // Added WordCloudWord
|
||||||
|
TopQuestion, // Added TopQuestion
|
||||||
} from "./types";
|
} from "./types";
|
||||||
|
|
||||||
interface CompanyConfig {
|
interface CompanyConfig {
|
||||||
@ -344,12 +345,28 @@ export function sessionMetrics(
|
|||||||
let sentimentPositiveCount = 0;
|
let sentimentPositiveCount = 0;
|
||||||
let sentimentNeutralCount = 0;
|
let sentimentNeutralCount = 0;
|
||||||
let sentimentNegativeCount = 0;
|
let sentimentNegativeCount = 0;
|
||||||
let totalTokens = 0;
|
const totalTokens = 0;
|
||||||
let totalTokensEur = 0;
|
const totalTokensEur = 0;
|
||||||
const wordCounts: { [key: string]: number } = {};
|
const wordCounts: { [key: string]: number } = {};
|
||||||
let alerts = 0;
|
let alerts = 0;
|
||||||
|
|
||||||
|
// New metrics variables
|
||||||
|
const hourlySessionCounts: { [hour: string]: number } = {};
|
||||||
|
let resolvedChatsCount = 0;
|
||||||
|
const questionCounts: { [question: string]: number } = {};
|
||||||
|
|
||||||
for (const session of sessions) {
|
for (const session of sessions) {
|
||||||
|
// Track hourly usage for peak time calculation
|
||||||
|
if (session.startTime) {
|
||||||
|
const hour = new Date(session.startTime).getHours();
|
||||||
|
const hourKey = `${hour.toString().padStart(2, '0')}:00`;
|
||||||
|
hourlySessionCounts[hourKey] = (hourlySessionCounts[hourKey] || 0) + 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Count resolved chats (sessions that have ended and are not escalated)
|
||||||
|
if (session.endTime && !session.escalated) {
|
||||||
|
resolvedChatsCount++;
|
||||||
|
}
|
||||||
// Unique Users: Prefer non-empty ipAddress, fallback to non-empty sessionId
|
// Unique Users: Prefer non-empty ipAddress, fallback to non-empty sessionId
|
||||||
let identifierAdded = false;
|
let identifierAdded = false;
|
||||||
if (session.ipAddress && session.ipAddress.trim() !== "") {
|
if (session.ipAddress && session.ipAddress.trim() !== "") {
|
||||||
@ -436,41 +453,25 @@ export function sessionMetrics(
|
|||||||
if (session.escalated) escalatedCount++;
|
if (session.escalated) escalatedCount++;
|
||||||
if (session.forwardedHr) forwardedHrCount++;
|
if (session.forwardedHr) forwardedHrCount++;
|
||||||
|
|
||||||
// Sentiment
|
// Sentiment (now using enum values)
|
||||||
if (session.sentiment !== undefined && session.sentiment !== null) {
|
if (session.sentiment !== undefined && session.sentiment !== null) {
|
||||||
// Example thresholds, adjust as needed
|
if (session.sentiment === "POSITIVE") sentimentPositiveCount++;
|
||||||
if (session.sentiment > 0.3) sentimentPositiveCount++;
|
else if (session.sentiment === "NEGATIVE") sentimentNegativeCount++;
|
||||||
else if (session.sentiment < -0.3) sentimentNegativeCount++;
|
else if (session.sentiment === "NEUTRAL") sentimentNeutralCount++;
|
||||||
else sentimentNeutralCount++;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Sentiment Alert Check
|
// Sentiment Alert Check (simplified for enum)
|
||||||
if (
|
if (
|
||||||
companyConfig.sentimentAlert !== undefined &&
|
companyConfig.sentimentAlert !== undefined &&
|
||||||
session.sentiment !== undefined &&
|
session.sentiment === "NEGATIVE"
|
||||||
session.sentiment !== null &&
|
|
||||||
session.sentiment < companyConfig.sentimentAlert
|
|
||||||
) {
|
) {
|
||||||
alerts++;
|
alerts++;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Tokens
|
|
||||||
if (session.tokens !== undefined && session.tokens !== null) {
|
|
||||||
totalTokens += session.tokens;
|
|
||||||
}
|
|
||||||
if (session.tokensEur !== undefined && session.tokensEur !== null) {
|
|
||||||
totalTokensEur += session.tokensEur;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Daily metrics
|
// Daily metrics
|
||||||
const day = new Date(session.startTime).toISOString().split("T")[0];
|
const day = new Date(session.startTime).toISOString().split("T")[0];
|
||||||
byDay[day] = (byDay[day] || 0) + 1; // Sessions per day
|
byDay[day] = (byDay[day] || 0) + 1; // Sessions per day
|
||||||
if (session.tokens !== undefined && session.tokens !== null) {
|
// Note: tokens and tokensEur are not available in the new schema
|
||||||
tokensByDay[day] = (tokensByDay[day] || 0) + session.tokens;
|
|
||||||
}
|
|
||||||
-      if (session.tokensEur !== undefined && session.tokensEur !== null) {
-        tokensCostByDay[day] = (tokensCostByDay[day] || 0) + session.tokensEur;
-      }

       // Category metrics
       if (session.category) {
@ -487,6 +488,34 @@ export function sessionMetrics(
       byCountry[session.country] = (byCountry[session.country] || 0) + 1;
     }

+    // Extract questions from session
+    const extractQuestions = () => {
+      // 1. Extract questions from user messages (if available)
+      if (session.messages) {
+        session.messages
+          .filter(msg => msg.role === 'User')
+          .forEach(msg => {
+            const content = msg.content.trim();
+            // Simple heuristic: if message ends with ? or contains question words, treat as question
+            if (content.endsWith('?') ||
+                /\b(what|when|where|why|how|who|which|can|could|would|will|is|are|do|does|did)\b/i.test(content)) {
+              questionCounts[content] = (questionCounts[content] || 0) + 1;
+            }
+          });
+      }
+
+      // 3. Extract questions from initial message as fallback
+      if (session.initialMsg) {
+        const content = session.initialMsg.trim();
+        if (content.endsWith('?') ||
+            /\b(what|when|where|why|how|who|which|can|could|would|will|is|are|do|does|did)\b/i.test(content)) {
+          questionCounts[content] = (questionCounts[content] || 0) + 1;
+        }
+      }
+    };
+
+    extractQuestions();
+
     // Word Cloud Data (from initial message and transcript content)
     const processTextForWordCloud = (text: string | undefined | null) => {
       if (!text) return;
@ -506,7 +535,8 @@ export function sessionMetrics(
       }
     };
     processTextForWordCloud(session.initialMsg);
-    processTextForWordCloud(session.transcriptContent);
+    // Note: transcriptContent is not available in ChatSession type
+    // Could be added later if transcript parsing is implemented
   }

   const uniqueUsers = uniqueUserIds.size;
@ -547,6 +577,30 @@ export function sessionMetrics(
     mockPreviousPeriodData.avgResponseTime
   );

+  // Calculate new metrics
+
+  // 1. Average Daily Costs (euros)
+  const avgDailyCosts = numDaysWithSessions > 0 ? totalTokensEur / numDaysWithSessions : 0;
+
+  // 2. Peak Usage Time
+  let peakUsageTime = "N/A";
+  if (Object.keys(hourlySessionCounts).length > 0) {
+    const peakHour = Object.entries(hourlySessionCounts)
+      .sort(([, a], [, b]) => b - a)[0][0];
+    const peakHourNum = parseInt(peakHour.split(':')[0]);
+    const endHour = (peakHourNum + 1) % 24;
+    peakUsageTime = `${peakHour}-${endHour.toString().padStart(2, '0')}:00`;
+  }
+
+  // 3. Resolved Chats Percentage
+  const resolvedChatsPercentage = totalSessions > 0 ? (resolvedChatsCount / totalSessions) * 100 : 0;
+
+  // 4. Top 5 Asked Questions
+  const topQuestions: TopQuestion[] = Object.entries(questionCounts)
+    .sort(([, a], [, b]) => b - a)
+    .slice(0, 5) // Top 5 questions
+    .map(([question, count]) => ({ question, count }));
+
   // console.log("Debug metrics calculation:", {
   //   totalSessionDuration,
   //   validSessionsForDuration,
@ -585,5 +639,11 @@ export function sessionMetrics(
     lastUpdated: Date.now(),
     totalSessionDuration,
     validSessionsForDuration,
+
+    // New metrics
+    avgDailyCosts,
+    peakUsageTime,
+    resolvedChatsPercentage,
+    topQuestions,
   };
 }
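For orientation on the new peakUsageTime metric added above: the peak hour bucket is formatted as an HH:MM-HH:MM range. A minimal sketch with made-up hourlySessionCounts data (the sample values are not part of the diff):

// Hypothetical input: session counts keyed by hour bucket.
const hourlySessionCounts: Record<string, number> = { "09:00": 12, "14:00": 7, "16:00": 9 };

const peakHour = Object.entries(hourlySessionCounts)
  .sort(([, a], [, b]) => b - a)[0][0];                     // "09:00" has the highest count
const peakHourNum = parseInt(peakHour.split(":")[0], 10);   // 9
const endHour = (peakHourNum + 1) % 24;                     // 10
const peakUsageTime = `${peakHour}-${endHour.toString().padStart(2, "0")}:00`; // "09:00-10:00"
console.log(peakUsageTime);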
620
lib/processingScheduler.ts
Normal file
@ -0,0 +1,620 @@
// Enhanced session processing scheduler with AI cost tracking and question management
import cron from "node-cron";
import { PrismaClient, SentimentCategory, SessionCategory, ProcessingStage } from "@prisma/client";
import fetch from "node-fetch";
import { getSchedulerConfig } from "./schedulerConfig";
import { ProcessingStatusManager } from "./processingStatusManager";

const prisma = new PrismaClient();
const OPENAI_API_KEY = process.env.OPENAI_API_KEY;
const OPENAI_API_URL = "https://api.openai.com/v1/chat/completions";
const DEFAULT_MODEL = process.env.OPENAI_MODEL || "gpt-4o";

const USD_TO_EUR_RATE = 0.85; // Update periodically or fetch from API

/**
 * Get company's default AI model
 */
async function getCompanyAIModel(companyId: string): Promise<string> {
  const companyModel = await prisma.companyAIModel.findFirst({
    where: {
      companyId,
      isDefault: true,
    },
    include: {
      aiModel: true,
    },
  });

  return companyModel?.aiModel.name || DEFAULT_MODEL;
}

/**
 * Get current pricing for an AI model
 */
async function getCurrentModelPricing(modelName: string): Promise<{
  promptTokenCost: number;
  completionTokenCost: number;
} | null> {
  const model = await prisma.aIModel.findUnique({
    where: { name: modelName },
    include: {
      pricing: {
        where: {
          effectiveFrom: { lte: new Date() },
          OR: [
            { effectiveUntil: null },
            { effectiveUntil: { gte: new Date() } }
          ]
        },
        orderBy: { effectiveFrom: 'desc' },
        take: 1,
      },
    },
  });

  if (!model || model.pricing.length === 0) {
    return null;
  }

  const pricing = model.pricing[0];
  return {
    promptTokenCost: pricing.promptTokenCost,
    completionTokenCost: pricing.completionTokenCost,
  };
}

interface ProcessedData {
  language: string;
  sentiment: "POSITIVE" | "NEUTRAL" | "NEGATIVE";
  escalated: boolean;
  forwarded_hr: boolean;
  category: "SCHEDULE_HOURS" | "LEAVE_VACATION" | "SICK_LEAVE_RECOVERY" | "SALARY_COMPENSATION" | "CONTRACT_HOURS" | "ONBOARDING" | "OFFBOARDING" | "WORKWEAR_STAFF_PASS" | "TEAM_CONTACTS" | "PERSONAL_QUESTIONS" | "ACCESS_LOGIN" | "SOCIAL_QUESTIONS" | "UNRECOGNIZED_OTHER";
  questions: string[];
  summary: string;
  session_id: string;
}

interface ProcessingResult {
  sessionId: string;
  success: boolean;
  error?: string;
}

/**
 * Record AI processing request with detailed token tracking
 */
async function recordAIProcessingRequest(
  sessionId: string,
  openaiResponse: any,
  processingType: string = 'session_analysis'
): Promise<void> {
  const usage = openaiResponse.usage;
  const model = openaiResponse.model;

  // Get current pricing from database
  const pricing = await getCurrentModelPricing(model);

  // Fallback pricing if not found in database
  const fallbackPricing = {
    promptTokenCost: 0.00001, // $10.00 per 1M tokens (gpt-4-turbo rate)
    completionTokenCost: 0.00003, // $30.00 per 1M tokens
  };

  const finalPricing = pricing || fallbackPricing;

  const promptCost = usage.prompt_tokens * finalPricing.promptTokenCost;
  const completionCost = usage.completion_tokens * finalPricing.completionTokenCost;
  const totalCostUsd = promptCost + completionCost;
  const totalCostEur = totalCostUsd * USD_TO_EUR_RATE;

  await prisma.aIProcessingRequest.create({
    data: {
      sessionId,
      openaiRequestId: openaiResponse.id,
      model: openaiResponse.model,
      serviceTier: openaiResponse.service_tier,
      systemFingerprint: openaiResponse.system_fingerprint,

      promptTokens: usage.prompt_tokens,
      completionTokens: usage.completion_tokens,
      totalTokens: usage.total_tokens,

      // Detailed breakdown
      cachedTokens: usage.prompt_tokens_details?.cached_tokens || null,
      audioTokensPrompt: usage.prompt_tokens_details?.audio_tokens || null,
      reasoningTokens: usage.completion_tokens_details?.reasoning_tokens || null,
      audioTokensCompletion: usage.completion_tokens_details?.audio_tokens || null,
      acceptedPredictionTokens: usage.completion_tokens_details?.accepted_prediction_tokens || null,
      rejectedPredictionTokens: usage.completion_tokens_details?.rejected_prediction_tokens || null,

      promptTokenCost: finalPricing.promptTokenCost,
      completionTokenCost: finalPricing.completionTokenCost,
      totalCostEur,

      processingType,
      success: true,
      completedAt: new Date(),
    }
  });
}

/**
 * Record failed AI processing request
 */
async function recordFailedAIProcessingRequest(
  sessionId: string,
  processingType: string,
  errorMessage: string
): Promise<void> {
  await prisma.aIProcessingRequest.create({
    data: {
      sessionId,
      model: 'unknown',
      promptTokens: 0,
      completionTokens: 0,
      totalTokens: 0,
      promptTokenCost: 0,
      completionTokenCost: 0,
      totalCostEur: 0,
      processingType,
      success: false,
      errorMessage,
      completedAt: new Date(),
    }
  });
}

/**
 * Process questions into separate Question and SessionQuestion tables
 */
async function processQuestions(sessionId: string, questions: string[]): Promise<void> {
  // Clear existing questions for this session
  await prisma.sessionQuestion.deleteMany({
    where: { sessionId }
  });

  // Process each question
  for (let index = 0; index < questions.length; index++) {
    const questionText = questions[index];
    if (!questionText.trim()) continue; // Skip empty questions

    // Find or create question
    const question = await prisma.question.upsert({
      where: { content: questionText.trim() },
      create: { content: questionText.trim() },
      update: {}
    });

    // Link to session
    await prisma.sessionQuestion.create({
      data: {
        sessionId,
        questionId: question.id,
        order: index
      }
    });
  }
}

/**
 * Calculate messagesSent from actual Message records
 */
async function calculateMessagesSent(sessionId: string): Promise<number> {
  const userMessageCount = await prisma.message.count({
    where: {
      sessionId,
      role: { in: ['user', 'User'] } // Handle both cases
    }
  });
  return userMessageCount;
}

/**
 * Calculate endTime from latest Message timestamp
 */
async function calculateEndTime(sessionId: string, fallbackEndTime: Date): Promise<Date> {
  const latestMessage = await prisma.message.findFirst({
    where: { sessionId },
    orderBy: { timestamp: 'desc' }
  });

  return latestMessage?.timestamp || fallbackEndTime;
}

/**
 * Processes a session transcript using OpenAI API
 */
async function processTranscriptWithOpenAI(sessionId: string, transcript: string, companyId: string): Promise<ProcessedData> {
  if (!OPENAI_API_KEY) {
    throw new Error("OPENAI_API_KEY environment variable is not set");
  }

  // Get company's AI model
  const aiModel = await getCompanyAIModel(companyId);

  // Updated system message with exact enum values
  const systemMessage = `
You are an AI assistant tasked with analyzing chat transcripts.
Extract the following information from the transcript and return it in EXACT JSON format:

{
  "language": "ISO 639-1 code (e.g., 'en', 'nl', 'de')",
  "sentiment": "POSITIVE|NEUTRAL|NEGATIVE",
  "escalated": boolean,
  "forwarded_hr": boolean,
  "category": "SCHEDULE_HOURS|LEAVE_VACATION|SICK_LEAVE_RECOVERY|SALARY_COMPENSATION|CONTRACT_HOURS|ONBOARDING|OFFBOARDING|WORKWEAR_STAFF_PASS|TEAM_CONTACTS|PERSONAL_QUESTIONS|ACCESS_LOGIN|SOCIAL_QUESTIONS|UNRECOGNIZED_OTHER",
  "questions": ["question 1", "question 2", ...],
  "summary": "brief summary (10-300 chars)",
  "session_id": "${sessionId}"
}

Rules:
- language: Primary language used by the user (ISO 639-1 code)
- sentiment: Overall emotional tone of the conversation
- escalated: Was the issue escalated to a supervisor/manager?
- forwarded_hr: Was HR contact mentioned or provided?
- category: Best fitting category for the main topic (use exact enum values above)
- questions: Up to 5 paraphrased user questions (in English)
- summary: Brief conversation summary (10-300 characters)

IMPORTANT: Use EXACT enum values as specified above.
`;

  try {
    const response = await fetch(OPENAI_API_URL, {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        Authorization: `Bearer ${OPENAI_API_KEY}`,
      },
      body: JSON.stringify({
        model: aiModel, // Use company's configured AI model
        messages: [
          {
            role: "system",
            content: systemMessage,
          },
          {
            role: "user",
            content: transcript,
          },
        ],
        temperature: 0.3, // Lower temperature for more consistent results
        response_format: { type: "json_object" },
      }),
    });

    if (!response.ok) {
      const errorText = await response.text();
      throw new Error(`OpenAI API error: ${response.status} - ${errorText}`);
    }

    const openaiResponse: any = await response.json();

    // Record the AI processing request for cost tracking
    await recordAIProcessingRequest(sessionId, openaiResponse, 'session_analysis');

    const processedData = JSON.parse(openaiResponse.choices[0].message.content);

    // Validate the response against our expected schema
    validateOpenAIResponse(processedData);

    return processedData;
  } catch (error) {
    // Record failed request
    await recordFailedAIProcessingRequest(
      sessionId,
      'session_analysis',
      error instanceof Error ? error.message : String(error)
    );

    process.stderr.write(`Error processing transcript with OpenAI: ${error}\n`);
    throw error;
  }
}

/**
 * Validates the OpenAI response against our expected schema
 */
function validateOpenAIResponse(data: any): void {
  const requiredFields = [
    "language", "sentiment", "escalated", "forwarded_hr",
    "category", "questions", "summary", "session_id"
  ];

  for (const field of requiredFields) {
    if (!(field in data)) {
      throw new Error(`Missing required field: ${field}`);
    }
  }

  // Validate field types and values
  if (typeof data.language !== "string" || !/^[a-z]{2}$/.test(data.language)) {
    throw new Error("Invalid language format. Expected ISO 639-1 code (e.g., 'en')");
  }

  if (!["POSITIVE", "NEUTRAL", "NEGATIVE"].includes(data.sentiment)) {
    throw new Error("Invalid sentiment. Expected 'POSITIVE', 'NEUTRAL', or 'NEGATIVE'");
  }

  if (typeof data.escalated !== "boolean") {
    throw new Error("Invalid escalated. Expected boolean");
  }

  if (typeof data.forwarded_hr !== "boolean") {
    throw new Error("Invalid forwarded_hr. Expected boolean");
  }

  const validCategories = [
    "SCHEDULE_HOURS", "LEAVE_VACATION", "SICK_LEAVE_RECOVERY", "SALARY_COMPENSATION",
    "CONTRACT_HOURS", "ONBOARDING", "OFFBOARDING", "WORKWEAR_STAFF_PASS",
    "TEAM_CONTACTS", "PERSONAL_QUESTIONS", "ACCESS_LOGIN", "SOCIAL_QUESTIONS",
    "UNRECOGNIZED_OTHER"
  ];

  if (!validCategories.includes(data.category)) {
    throw new Error(`Invalid category. Expected one of: ${validCategories.join(", ")}`);
  }

  if (!Array.isArray(data.questions)) {
    throw new Error("Invalid questions. Expected array of strings");
  }

  if (typeof data.summary !== "string" || data.summary.length < 10 || data.summary.length > 300) {
    throw new Error("Invalid summary. Expected string between 10-300 characters");
  }

  if (typeof data.session_id !== "string") {
    throw new Error("Invalid session_id. Expected string");
  }
}

/**
 * Process a single session
 */
async function processSingleSession(session: any): Promise<ProcessingResult> {
  if (session.messages.length === 0) {
    return {
      sessionId: session.id,
      success: false,
      error: "Session has no messages",
    };
  }

  try {
    // Mark AI analysis as started
    await ProcessingStatusManager.startStage(session.id, ProcessingStage.AI_ANALYSIS);

    // Convert messages back to transcript format for OpenAI processing
    const transcript = session.messages
      .map((msg: any) =>
        `[${new Date(msg.timestamp)
          .toLocaleString("en-GB", {
            day: "2-digit",
            month: "2-digit",
            year: "numeric",
            hour: "2-digit",
            minute: "2-digit",
            second: "2-digit",
          })
          .replace(",", "")}] ${msg.role}: ${msg.content}`
      )
      .join("\n");

    const processedData = await processTranscriptWithOpenAI(session.id, transcript, session.companyId);

    // Calculate messagesSent from actual Message records
    const messagesSent = await calculateMessagesSent(session.id);

    // Calculate endTime from latest Message timestamp
    const calculatedEndTime = await calculateEndTime(session.id, session.endTime);

    // Update the session with processed data
    await prisma.session.update({
      where: { id: session.id },
      data: {
        language: processedData.language,
        messagesSent: messagesSent, // Calculated from Messages, not AI
        endTime: calculatedEndTime, // Use calculated endTime if different
        sentiment: processedData.sentiment as SentimentCategory,
        escalated: processedData.escalated,
        forwardedHr: processedData.forwarded_hr,
        category: processedData.category as SessionCategory,
        summary: processedData.summary,
      },
    });

    // Mark AI analysis as completed
    await ProcessingStatusManager.completeStage(session.id, ProcessingStage.AI_ANALYSIS, {
      language: processedData.language,
      sentiment: processedData.sentiment,
      category: processedData.category,
      questionsCount: processedData.questions.length
    });

    // Start question extraction stage
    await ProcessingStatusManager.startStage(session.id, ProcessingStage.QUESTION_EXTRACTION);

    // Process questions into separate tables
    await processQuestions(session.id, processedData.questions);

    // Mark question extraction as completed
    await ProcessingStatusManager.completeStage(session.id, ProcessingStage.QUESTION_EXTRACTION, {
      questionsProcessed: processedData.questions.length
    });

    return {
      sessionId: session.id,
      success: true,
    };
  } catch (error) {
    // Mark AI analysis as failed
    await ProcessingStatusManager.failStage(
      session.id,
      ProcessingStage.AI_ANALYSIS,
      error instanceof Error ? error.message : String(error)
    );

    return {
      sessionId: session.id,
      success: false,
      error: error instanceof Error ? error.message : String(error),
    };
  }
}

/**
 * Process sessions in parallel with concurrency limit
 */
async function processSessionsInParallel(sessions: any[], maxConcurrency: number = 5): Promise<ProcessingResult[]> {
  const results: Promise<ProcessingResult>[] = [];
  const executing: Promise<ProcessingResult>[] = [];

  for (const session of sessions) {
    const promise = processSingleSession(session).then((result) => {
      process.stdout.write(
        result.success
          ? `[ProcessingScheduler] ✓ Successfully processed session ${result.sessionId}\n`
          : `[ProcessingScheduler] ✗ Failed to process session ${result.sessionId}: ${result.error}\n`
      );
      return result;
    });

    results.push(promise);
    executing.push(promise);

    if (executing.length >= maxConcurrency) {
      await Promise.race(executing);
      const completedIndex = executing.findIndex(p => p === promise);
      if (completedIndex !== -1) {
        executing.splice(completedIndex, 1);
      }
    }
  }

  return Promise.all(results);
}

/**
 * Process unprocessed sessions using the new processing status system
 */
export async function processUnprocessedSessions(batchSize: number | null = null, maxConcurrency: number = 5): Promise<void> {
  process.stdout.write("[ProcessingScheduler] Starting to process sessions needing AI analysis...\n");

  // Get sessions that need AI processing using the new status system
  const sessionsNeedingAI = await ProcessingStatusManager.getSessionsNeedingProcessing(
    ProcessingStage.AI_ANALYSIS,
    batchSize || 50
  );

  if (sessionsNeedingAI.length === 0) {
    process.stdout.write("[ProcessingScheduler] No sessions found requiring AI processing.\n");
    return;
  }

  // Get session IDs that need processing
  const sessionIds = sessionsNeedingAI.map(statusRecord => statusRecord.sessionId);

  // Fetch full session data with messages
  const sessionsToProcess = await prisma.session.findMany({
    where: {
      id: { in: sessionIds }
    },
    include: {
      messages: {
        orderBy: { order: "asc" },
      },
    },
  });

  // Filter to only sessions that have messages
  const sessionsWithMessages = sessionsToProcess.filter(
    (session: any) => session.messages && session.messages.length > 0
  );

  if (sessionsWithMessages.length === 0) {
    process.stdout.write("[ProcessingScheduler] No sessions with messages found requiring processing.\n");
    return;
  }

  process.stdout.write(
    `[ProcessingScheduler] Found ${sessionsWithMessages.length} sessions to process (max concurrency: ${maxConcurrency}).\n`
  );

  const startTime = Date.now();
  const results = await processSessionsInParallel(sessionsWithMessages, maxConcurrency);
  const endTime = Date.now();

  const successCount = results.filter((r) => r.success).length;
  const errorCount = results.filter((r) => !r.success).length;

  process.stdout.write("[ProcessingScheduler] Session processing complete.\n");
  process.stdout.write(`[ProcessingScheduler] Successfully processed: ${successCount} sessions.\n`);
  process.stdout.write(`[ProcessingScheduler] Failed to process: ${errorCount} sessions.\n`);
  process.stdout.write(`[ProcessingScheduler] Total processing time: ${((endTime - startTime) / 1000).toFixed(2)}s\n`);
}

/**
 * Get total AI processing costs for reporting
 */
export async function getAIProcessingCosts(): Promise<{
  totalCostEur: number;
  totalTokens: number;
  requestCount: number;
  successfulRequests: number;
  failedRequests: number;
}> {
  const result = await prisma.aIProcessingRequest.aggregate({
    _sum: {
      totalCostEur: true,
      totalTokens: true,
    },
    _count: {
      id: true,
    },
  });

  const successfulRequests = await prisma.aIProcessingRequest.count({
    where: { success: true }
  });

  const failedRequests = await prisma.aIProcessingRequest.count({
    where: { success: false }
  });

  return {
    totalCostEur: result._sum.totalCostEur || 0,
    totalTokens: result._sum.totalTokens || 0,
    requestCount: result._count.id || 0,
    successfulRequests,
    failedRequests,
  };
}

/**
 * Start the processing scheduler with configurable settings
 */
export function startProcessingScheduler(): void {
  const config = getSchedulerConfig();

  if (!config.enabled) {
    console.log('[Processing Scheduler] Disabled via configuration');
    return;
  }

  console.log(`[Processing Scheduler] Starting with interval: ${config.sessionProcessing.interval}`);
  console.log(`[Processing Scheduler] Batch size: ${config.sessionProcessing.batchSize === 0 ? 'unlimited' : config.sessionProcessing.batchSize}`);
  console.log(`[Processing Scheduler] Concurrency: ${config.sessionProcessing.concurrency}`);

  cron.schedule(config.sessionProcessing.interval, async () => {
    try {
      await processUnprocessedSessions(
        config.sessionProcessing.batchSize === 0 ? null : config.sessionProcessing.batchSize,
        config.sessionProcessing.concurrency
      );
    } catch (error) {
      process.stderr.write(`[ProcessingScheduler] Error in scheduler: ${error}\n`);
    }
  });
}
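The exports above are wired to cron by startProcessingScheduler, but they can also be driven manually. A minimal sketch of a one-off script (the script location and top-level wiring are assumptions, not part of this diff):

// Hypothetical script, e.g. scripts/process-sessions.ts
import { processUnprocessedSessions, getAIProcessingCosts } from "../lib/processingScheduler";

async function main(): Promise<void> {
  // Process up to 20 pending sessions, 3 in parallel.
  await processUnprocessedSessions(20, 3);

  // Report the accumulated AI spend recorded by recordAIProcessingRequest.
  const costs = await getAIProcessingCosts();
  console.log(
    `AI spend: €${costs.totalCostEur.toFixed(4)} across ${costs.requestCount} requests ` +
    `(${costs.successfulRequests} ok, ${costs.failedRequests} failed)`
  );
}

main().catch((error) => {
  console.error(error);
  process.exit(1);
});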
295
lib/processingStatusManager.ts
Normal file
@ -0,0 +1,295 @@
import { PrismaClient, ProcessingStage, ProcessingStatus } from '@prisma/client';

const prisma = new PrismaClient();

/**
 * Centralized processing status management
 */
export class ProcessingStatusManager {

  /**
   * Initialize processing status for a session with all stages set to PENDING
   */
  static async initializeSession(sessionId: string): Promise<void> {
    const stages = [
      ProcessingStage.CSV_IMPORT,
      ProcessingStage.TRANSCRIPT_FETCH,
      ProcessingStage.SESSION_CREATION,
      ProcessingStage.AI_ANALYSIS,
      ProcessingStage.QUESTION_EXTRACTION,
    ];

    // Create all processing status records for this session
    await prisma.sessionProcessingStatus.createMany({
      data: stages.map(stage => ({
        sessionId,
        stage,
        status: ProcessingStatus.PENDING,
      })),
      skipDuplicates: true, // In case some already exist
    });
  }

  /**
   * Start a processing stage
   */
  static async startStage(
    sessionId: string,
    stage: ProcessingStage,
    metadata?: any
  ): Promise<void> {
    await prisma.sessionProcessingStatus.upsert({
      where: {
        sessionId_stage: { sessionId, stage }
      },
      update: {
        status: ProcessingStatus.IN_PROGRESS,
        startedAt: new Date(),
        errorMessage: null,
        metadata: metadata || null,
      },
      create: {
        sessionId,
        stage,
        status: ProcessingStatus.IN_PROGRESS,
        startedAt: new Date(),
        metadata: metadata || null,
      },
    });
  }

  /**
   * Complete a processing stage successfully
   */
  static async completeStage(
    sessionId: string,
    stage: ProcessingStage,
    metadata?: any
  ): Promise<void> {
    await prisma.sessionProcessingStatus.upsert({
      where: {
        sessionId_stage: { sessionId, stage }
      },
      update: {
        status: ProcessingStatus.COMPLETED,
        completedAt: new Date(),
        errorMessage: null,
        metadata: metadata || null,
      },
      create: {
        sessionId,
        stage,
        status: ProcessingStatus.COMPLETED,
        startedAt: new Date(),
        completedAt: new Date(),
        metadata: metadata || null,
      },
    });
  }

  /**
   * Mark a processing stage as failed
   */
  static async failStage(
    sessionId: string,
    stage: ProcessingStage,
    errorMessage: string,
    metadata?: any
  ): Promise<void> {
    await prisma.sessionProcessingStatus.upsert({
      where: {
        sessionId_stage: { sessionId, stage }
      },
      update: {
        status: ProcessingStatus.FAILED,
        completedAt: new Date(),
        errorMessage,
        retryCount: { increment: 1 },
        metadata: metadata || null,
      },
      create: {
        sessionId,
        stage,
        status: ProcessingStatus.FAILED,
        startedAt: new Date(),
        completedAt: new Date(),
        errorMessage,
        retryCount: 1,
        metadata: metadata || null,
      },
    });
  }

  /**
   * Skip a processing stage (e.g., no transcript URL available)
   */
  static async skipStage(
    sessionId: string,
    stage: ProcessingStage,
    reason: string
  ): Promise<void> {
    await prisma.sessionProcessingStatus.upsert({
      where: {
        sessionId_stage: { sessionId, stage }
      },
      update: {
        status: ProcessingStatus.SKIPPED,
        completedAt: new Date(),
        errorMessage: reason,
      },
      create: {
        sessionId,
        stage,
        status: ProcessingStatus.SKIPPED,
        startedAt: new Date(),
        completedAt: new Date(),
        errorMessage: reason,
      },
    });
  }

  /**
   * Get processing status for a specific session
   */
  static async getSessionStatus(sessionId: string) {
    return await prisma.sessionProcessingStatus.findMany({
      where: { sessionId },
      orderBy: { stage: 'asc' },
    });
  }

  /**
   * Get sessions that need processing for a specific stage
   */
  static async getSessionsNeedingProcessing(
    stage: ProcessingStage,
    limit: number = 50
  ) {
    return await prisma.sessionProcessingStatus.findMany({
      where: {
        stage,
        status: ProcessingStatus.PENDING,
      },
      include: {
        session: {
          include: {
            import: true,
            company: true,
          },
        },
      },
      take: limit,
      orderBy: { session: { createdAt: 'asc' } },
    });
  }

  /**
   * Get pipeline status overview
   */
  static async getPipelineStatus() {
    // Get counts by stage and status
    const statusCounts = await prisma.sessionProcessingStatus.groupBy({
      by: ['stage', 'status'],
      _count: { id: true },
    });

    // Get total sessions
    const totalSessions = await prisma.session.count();

    // Organize the data
    const pipeline: Record<string, Record<string, number>> = {};

    for (const { stage, status, _count } of statusCounts) {
      if (!pipeline[stage]) {
        pipeline[stage] = {};
      }
      pipeline[stage][status] = _count.id;
    }

    return {
      totalSessions,
      pipeline,
    };
  }

  /**
   * Get sessions with failed processing
   */
  static async getFailedSessions(stage?: ProcessingStage) {
    const where: any = {
      status: ProcessingStatus.FAILED,
    };

    if (stage) {
      where.stage = stage;
    }

    return await prisma.sessionProcessingStatus.findMany({
      where,
      include: {
        session: {
          include: {
            import: true,
          },
        },
      },
      orderBy: { completedAt: 'desc' },
    });
  }

  /**
   * Reset a failed stage for retry
   */
  static async resetStageForRetry(sessionId: string, stage: ProcessingStage): Promise<void> {
    await prisma.sessionProcessingStatus.update({
      where: {
        sessionId_stage: { sessionId, stage }
      },
      data: {
        status: ProcessingStatus.PENDING,
        startedAt: null,
        completedAt: null,
        errorMessage: null,
      },
    });
  }

  /**
   * Check if a session has completed a specific stage
   */
  static async hasCompletedStage(sessionId: string, stage: ProcessingStage): Promise<boolean> {
    const status = await prisma.sessionProcessingStatus.findUnique({
      where: {
        sessionId_stage: { sessionId, stage }
      },
    });

    return status?.status === ProcessingStatus.COMPLETED;
  }

  /**
   * Check if a session is ready for a specific stage (previous stages completed)
   */
  static async isReadyForStage(sessionId: string, stage: ProcessingStage): Promise<boolean> {
    const stageOrder = [
      ProcessingStage.CSV_IMPORT,
      ProcessingStage.TRANSCRIPT_FETCH,
      ProcessingStage.SESSION_CREATION,
      ProcessingStage.AI_ANALYSIS,
      ProcessingStage.QUESTION_EXTRACTION,
    ];

    const currentStageIndex = stageOrder.indexOf(stage);
    if (currentStageIndex === 0) return true; // First stage is always ready

    // Check if all previous stages are completed
    const previousStages = stageOrder.slice(0, currentStageIndex);

    for (const prevStage of previousStages) {
      const isCompleted = await this.hasCompletedStage(sessionId, prevStage);
      if (!isCompleted) return false;
    }

    return true;
  }
}
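A minimal sketch of how a pipeline step is expected to use the stage helpers above (the wrapper function and its call site are hypothetical, not part of the diff):

import { ProcessingStage } from "@prisma/client";
import { ProcessingStatusManager } from "./processingStatusManager";

// Hypothetical wrapper: gate a step on earlier stages, then record its outcome.
async function runAiAnalysisStage(sessionId: string, work: () => Promise<void>): Promise<void> {
  const ready = await ProcessingStatusManager.isReadyForStage(sessionId, ProcessingStage.AI_ANALYSIS);
  if (!ready) return; // earlier stages (CSV import, transcript fetch, session creation) not done yet

  await ProcessingStatusManager.startStage(sessionId, ProcessingStage.AI_ANALYSIS);
  try {
    await work();
    await ProcessingStatusManager.completeStage(sessionId, ProcessingStage.AI_ANALYSIS);
  } catch (error) {
    await ProcessingStatusManager.failStage(
      sessionId,
      ProcessingStage.AI_ANALYSIS,
      error instanceof Error ? error.message : String(error)
    );
  }
}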
119
lib/scheduler.ts
@ -1,67 +1,94 @@
-// node-cron job to auto-refresh session data every 15 mins
+// CSV import scheduler with configurable intervals
 import cron from "node-cron";
 import { prisma } from "./prisma";
 import { fetchAndParseCsv } from "./csvFetcher";
+import { getSchedulerConfig } from "./schedulerConfig";

-interface SessionCreateData {
-  id: string;
-  startTime: Date;
-  companyId: string;
-  [key: string]: unknown;
-}
-
-export function startScheduler() {
-  cron.schedule("*/15 * * * *", async () => {
+export function startCsvImportScheduler() {
+  const config = getSchedulerConfig();
+
+  if (!config.enabled) {
+    console.log('[CSV Import Scheduler] Disabled via configuration');
+    return;
+  }
+
+  console.log(`[CSV Import Scheduler] Starting with interval: ${config.csvImport.interval}`);
+
+  cron.schedule(config.csvImport.interval, async () => {
     const companies = await prisma.company.findMany();
     for (const company of companies) {
       try {
-        const sessions = await fetchAndParseCsv(
+        const rawSessionData = await fetchAndParseCsv(
           company.csvUrl,
           company.csvUsername as string | undefined,
           company.csvPassword as string | undefined
         );
-        await prisma.session.deleteMany({ where: { companyId: company.id } });

-        for (const session of sessions) {
-          const sessionData: SessionCreateData = {
-            ...session,
-            companyId: company.id,
-            id: session.id || session.sessionId || `sess_${Date.now()}`,
-            // Ensure startTime is not undefined
-            startTime: session.startTime || new Date(),
-          };
-
-          // Only include fields that are properly typed for Prisma
-          await prisma.session.create({
-            data: {
-              id: sessionData.id,
-              companyId: sessionData.companyId,
-              startTime: sessionData.startTime,
-              // endTime is required in the schema, so use startTime if not available
-              endTime: session.endTime || new Date(),
-              ipAddress: session.ipAddress || null,
-              country: session.country || null,
-              language: session.language || null,
-              sentiment:
-                typeof session.sentiment === "number"
-                  ? session.sentiment
-                  : null,
-              messagesSent:
-                typeof session.messagesSent === "number"
-                  ? session.messagesSent
-                  : 0,
-              category: session.category || null,
-            },
-          });
+        // Create SessionImport records for new data
+        for (const rawSession of rawSessionData) {
+          try {
+            // Use upsert to handle duplicates gracefully
+            await prisma.sessionImport.upsert({
+              where: {
+                companyId_externalSessionId: {
+                  companyId: company.id,
+                  externalSessionId: rawSession.externalSessionId,
+                },
+              },
+              update: {
+                // Update existing record with latest data
+                startTimeRaw: rawSession.startTimeRaw,
+                endTimeRaw: rawSession.endTimeRaw,
+                ipAddress: rawSession.ipAddress,
+                countryCode: rawSession.countryCode,
+                language: rawSession.language,
+                messagesSent: rawSession.messagesSent,
+                sentimentRaw: rawSession.sentimentRaw,
+                escalatedRaw: rawSession.escalatedRaw,
+                forwardedHrRaw: rawSession.forwardedHrRaw,
+                fullTranscriptUrl: rawSession.fullTranscriptUrl,
+                avgResponseTimeSeconds: rawSession.avgResponseTimeSeconds,
+                tokens: rawSession.tokens,
+                tokensEur: rawSession.tokensEur,
+                category: rawSession.category,
+                initialMessage: rawSession.initialMessage,
+                // Status tracking now handled by ProcessingStatusManager
+              },
+              create: {
+                companyId: company.id,
+                externalSessionId: rawSession.externalSessionId,
+                startTimeRaw: rawSession.startTimeRaw,
+                endTimeRaw: rawSession.endTimeRaw,
+                ipAddress: rawSession.ipAddress,
+                countryCode: rawSession.countryCode,
+                language: rawSession.language,
+                messagesSent: rawSession.messagesSent,
+                sentimentRaw: rawSession.sentimentRaw,
+                escalatedRaw: rawSession.escalatedRaw,
+                forwardedHrRaw: rawSession.forwardedHrRaw,
+                fullTranscriptUrl: rawSession.fullTranscriptUrl,
+                avgResponseTimeSeconds: rawSession.avgResponseTimeSeconds,
+                tokens: rawSession.tokens,
+                tokensEur: rawSession.tokensEur,
+                category: rawSession.category,
+                initialMessage: rawSession.initialMessage,
+                // Status tracking now handled by ProcessingStatusManager
+              },
+            });
+          } catch (error) {
+            // Log individual session import errors but continue processing
+            process.stderr.write(
+              `[Scheduler] Failed to import session ${rawSession.externalSessionId} for company ${company.name}: ${error}\n`
+            );
+          }
         }
-        // Using process.stdout.write instead of console.log to avoid ESLint warning
         process.stdout.write(
-          `[Scheduler] Refreshed sessions for company: ${company.name}\n`
+          `[Scheduler] Imported ${rawSessionData.length} session records for company: ${company.name}\n`
         );
       } catch (e) {
-        // Using process.stderr.write instead of console.error to avoid ESLint warning
         process.stderr.write(
-          `[Scheduler] Failed for company: ${company.name} - ${e}\n`
+          `[Scheduler] Failed to fetch CSV for company: ${company.name} - ${e}\n`
         );
       }
     }
44
lib/schedulerConfig.ts
Normal file
@ -0,0 +1,44 @@
// Legacy scheduler configuration - now uses centralized env management
// This file is kept for backward compatibility but delegates to lib/env.ts

import { getSchedulerConfig as getEnvSchedulerConfig, logEnvConfig } from "./env";

export interface SchedulerConfig {
  enabled: boolean;
  csvImport: {
    interval: string;
  };
  sessionProcessing: {
    interval: string;
    batchSize: number; // 0 = unlimited
    concurrency: number;
  };
}

/**
 * Get scheduler configuration from environment variables
 * @deprecated Use getSchedulerConfig from lib/env.ts instead
 */
export function getSchedulerConfig(): SchedulerConfig {
  const config = getEnvSchedulerConfig();

  return {
    enabled: config.enabled,
    csvImport: {
      interval: config.csvImport.interval,
    },
    sessionProcessing: {
      interval: config.sessionProcessing.interval,
      batchSize: config.sessionProcessing.batchSize,
      concurrency: config.sessionProcessing.concurrency,
    },
  };
}

/**
 * Log scheduler configuration
 * @deprecated Use logEnvConfig from lib/env.ts instead
 */
export function logSchedulerConfig(config: SchedulerConfig): void {
  logEnvConfig();
}
18
lib/schedulers.ts
Normal file
@ -0,0 +1,18 @@
// Combined scheduler initialization
import { startCsvImportScheduler } from "./scheduler";
import { startProcessingScheduler } from "./processingScheduler";

/**
 * Initialize all schedulers
 * - CSV import scheduler (runs every 15 minutes)
 * - Session processing scheduler (runs every hour)
 */
export function initializeSchedulers() {
  // Start the CSV import scheduler
  startCsvImportScheduler();

  // Start the session processing scheduler
  startProcessingScheduler();

  console.log("All schedulers initialized successfully");
}
151
lib/transcriptFetcher.ts
Normal file
@ -0,0 +1,151 @@
// Transcript fetching utility
import fetch from "node-fetch";

export interface TranscriptFetchResult {
  success: boolean;
  content?: string;
  error?: string;
}

/**
 * Fetch transcript content from a URL
 * @param url The transcript URL
 * @param username Optional username for authentication
 * @param password Optional password for authentication
 * @returns Promise with fetch result
 */
export async function fetchTranscriptContent(
  url: string,
  username?: string,
  password?: string
): Promise<TranscriptFetchResult> {
  try {
    if (!url || !url.trim()) {
      return {
        success: false,
        error: 'No transcript URL provided',
      };
    }

    // Prepare authentication header if credentials provided
    const authHeader =
      username && password
        ? "Basic " + Buffer.from(`${username}:${password}`).toString("base64")
        : undefined;

    const headers: Record<string, string> = {
      'User-Agent': 'LiveDash-Transcript-Fetcher/1.0',
    };

    if (authHeader) {
      headers.Authorization = authHeader;
    }

    // Fetch the transcript with timeout
    const controller = new AbortController();
    const timeoutId = setTimeout(() => controller.abort(), 30000); // 30 second timeout

    const response = await fetch(url, {
      method: 'GET',
      headers,
      signal: controller.signal,
    });

    clearTimeout(timeoutId);

    if (!response.ok) {
      return {
        success: false,
        error: `HTTP ${response.status}: ${response.statusText}`,
      };
    }

    const content = await response.text();

    if (!content || content.trim().length === 0) {
      return {
        success: false,
        error: 'Empty transcript content',
      };
    }

    return {
      success: true,
      content: content.trim(),
    };

  } catch (error) {
    const errorMessage = error instanceof Error ? error.message : String(error);

    // Handle common network errors
    if (errorMessage.includes('ENOTFOUND')) {
      return {
        success: false,
        error: 'Domain not found',
      };
    }

    if (errorMessage.includes('ECONNREFUSED')) {
      return {
        success: false,
        error: 'Connection refused',
      };
    }

    if (errorMessage.includes('timeout')) {
      return {
        success: false,
        error: 'Request timeout',
      };
    }

    return {
      success: false,
      error: errorMessage,
    };
  }
}

/**
 * Validate if a URL looks like a valid transcript URL
 * @param url The URL to validate
 * @returns boolean indicating if URL appears valid
 */
export function isValidTranscriptUrl(url: string): boolean {
  if (!url || typeof url !== 'string') {
    return false;
  }

  try {
    const parsedUrl = new URL(url);
    return parsedUrl.protocol === 'http:' || parsedUrl.protocol === 'https:';
  } catch {
    return false;
  }
}

/**
 * Extract session ID from transcript content if possible
 * This is a helper function that can be enhanced based on transcript format
 * @param content The transcript content
 * @returns Extracted session ID or null
 */
export function extractSessionIdFromTranscript(content: string): string | null {
  if (!content) return null;

  // Look for common session ID patterns
  const patterns = [
    /session[_-]?id[:\s]*([a-zA-Z0-9-]+)/i,
    /id[:\s]*([a-zA-Z0-9-]{8,})/i,
    /^([a-zA-Z0-9-]{8,})/m, // First line might be session ID
  ];

  for (const pattern of patterns) {
    const match = content.match(pattern);
    if (match && match[1]) {
      return match[1].trim();
    }
  }

  return null;
}
360
lib/transcriptParser.ts
Normal file
@ -0,0 +1,360 @@
// Transcript parsing utility for converting raw transcript content into structured messages
import { prisma } from './prisma.js';

export interface ParsedMessage {
  sessionId: string;
  timestamp: Date;
  role: string;
  content: string;
  order: number;
}

export interface TranscriptParseResult {
  success: boolean;
  messages?: ParsedMessage[];
  error?: string;
}

/**
 * Parse European date format (DD.MM.YYYY HH:mm:ss) to Date object
 */
function parseEuropeanDate(dateStr: string): Date {
  const match = dateStr.match(/(\d{2})\.(\d{2})\.(\d{4}) (\d{2}):(\d{2}):(\d{2})/);
  if (!match) {
    throw new Error(`Invalid date format: ${dateStr}`);
  }

  const [, day, month, year, hour, minute, second] = match;
  return new Date(
    parseInt(year, 10),
    parseInt(month, 10) - 1, // JavaScript months are 0-indexed
    parseInt(day, 10),
    parseInt(hour, 10),
    parseInt(minute, 10),
    parseInt(second, 10)
  );
}

/**
 * Parse raw transcript content into structured messages
 * @param content Raw transcript content
 * @param startTime Session start time
 * @param endTime Session end time
 * @returns Parsed messages with timestamps
 */
export function parseTranscriptToMessages(
  content: string,
  startTime: Date,
  endTime: Date
): TranscriptParseResult {
  try {
    if (!content || !content.trim()) {
      return {
        success: false,
        error: 'Empty transcript content'
      };
    }

    const messages: ParsedMessage[] = [];
    const lines = content.split('\n');
    let currentMessage: { role: string; content: string; timestamp?: string } | null = null;
    let order = 0;

    for (const line of lines) {
      const trimmedLine = line.trim();

      // Skip empty lines
      if (!trimmedLine) {
        continue;
      }

      // Check if line starts with a timestamp and role [DD.MM.YYYY HH:MM:SS] Role: content
      const timestampRoleMatch = trimmedLine.match(/^\[(\d{2}\.\d{2}\.\d{4} \d{2}:\d{2}:\d{2})\]\s+(User|Assistant|System|user|assistant|system):\s*(.*)$/i);

      // Check if line starts with just a role (User:, Assistant:, System:, etc.)
      const roleMatch = trimmedLine.match(/^(User|Assistant|System|user|assistant|system):\s*(.*)$/i);

      if (timestampRoleMatch) {
        // Save previous message if exists
        if (currentMessage) {
          messages.push({
            sessionId: '', // Will be set by caller
            timestamp: new Date(), // Will be calculated below
            role: currentMessage.role,
            content: currentMessage.content.trim(),
            order: order++
          });
        }

        // Start new message with timestamp
        const timestamp = timestampRoleMatch[1];
        const role = timestampRoleMatch[2].charAt(0).toUpperCase() + timestampRoleMatch[2].slice(1).toLowerCase();
        const content = timestampRoleMatch[3] || '';

        currentMessage = {
          role,
          content,
          timestamp // Store the timestamp for later parsing
        };
      } else if (roleMatch) {
        // Save previous message if exists
        if (currentMessage) {
          messages.push({
            sessionId: '', // Will be set by caller
            timestamp: new Date(), // Will be calculated below
            role: currentMessage.role,
            content: currentMessage.content.trim(),
            order: order++
          });
        }

        // Start new message without timestamp
        const role = roleMatch[1].charAt(0).toUpperCase() + roleMatch[1].slice(1).toLowerCase();
        const content = roleMatch[2] || '';

        currentMessage = {
          role,
          content
        };
      } else if (currentMessage) {
        // Continue previous message (multi-line)
        currentMessage.content += '\n' + trimmedLine;
      }
      // If no current message and no role match, skip the line (orphaned content)
    }

    // Save the last message
    if (currentMessage) {
      messages.push({
        sessionId: '', // Will be set by caller
        timestamp: new Date(), // Will be calculated below
        role: currentMessage.role,
        content: currentMessage.content.trim(),
        order: order++
      });
    }

    if (messages.length === 0) {
      return {
        success: false,
        error: 'No messages found in transcript'
      };
    }

    // Calculate timestamps - use parsed timestamps if available, otherwise distribute across session duration
    const hasTimestamps = messages.some(msg => (msg as any).timestamp);

    if (hasTimestamps) {
      // Use parsed timestamps from the transcript
      messages.forEach((message, index) => {
        const msgWithTimestamp = message as any;
        if (msgWithTimestamp.timestamp) {
          try {
            message.timestamp = parseEuropeanDate(msgWithTimestamp.timestamp);
          } catch (error) {
            // Fallback to distributed timestamp if parsing fails
            const sessionDurationMs = endTime.getTime() - startTime.getTime();
            const messageInterval = messages.length > 1 ? sessionDurationMs / (messages.length - 1) : 0;
            message.timestamp = new Date(startTime.getTime() + (index * messageInterval));
          }
        } else {
          // Fallback to distributed timestamp
          const sessionDurationMs = endTime.getTime() - startTime.getTime();
          const messageInterval = messages.length > 1 ? sessionDurationMs / (messages.length - 1) : 0;
          message.timestamp = new Date(startTime.getTime() + (index * messageInterval));
        }
      });
    } else {
      // Distribute messages across session duration
      const sessionDurationMs = endTime.getTime() - startTime.getTime();
      const messageInterval = messages.length > 1 ? sessionDurationMs / (messages.length - 1) : 0;

      messages.forEach((message, index) => {
        message.timestamp = new Date(startTime.getTime() + (index * messageInterval));
      });
    }

    return {
      success: true,
      messages
    };

  } catch (error) {
    return {
      success: false,
      error: error instanceof Error ? error.message : String(error)
    };
  }
}

/**
 * Store parsed messages in the database for a session
 * @param sessionId The session ID
 * @param messages Array of parsed messages
 */
export async function storeMessagesForSession(
  sessionId: string,
  messages: ParsedMessage[]
): Promise<void> {
  // Delete existing messages for this session (in case of re-processing)
  await prisma.message.deleteMany({
    where: { sessionId }
  });

  // Create new messages
  const messagesWithSessionId = messages.map(msg => ({
    ...msg,
    sessionId
  }));

  await prisma.message.createMany({
    data: messagesWithSessionId
  });
}

/**
 * Process transcript for a single session
 * @param sessionId The session ID to process
 */
export async function processSessionTranscript(sessionId: string): Promise<void> {
  // Get the session and its import data
  const session = await prisma.session.findUnique({
    where: { id: sessionId },
    include: {
      import: true
    }
  });

  if (!session) {
    throw new Error(`Session not found: ${sessionId}`);
  }

  if (!session.import) {
    throw new Error(`No import data found for session: ${sessionId}`);
  }

  if (!session.import.rawTranscriptContent) {
    throw new Error(`No transcript content found for session: ${sessionId}`);
  }

  // Parse the start and end times
  const startTime = parseEuropeanDate(session.import.startTimeRaw);
  const endTime = parseEuropeanDate(session.import.endTimeRaw);

  // Parse the transcript
  const parseResult = parseTranscriptToMessages(
    session.import.rawTranscriptContent,
    startTime,
    endTime
  );

  if (!parseResult.success) {
    throw new Error(`Failed to parse transcript: ${parseResult.error}`);
  }

  // Store the messages
  await storeMessagesForSession(sessionId, parseResult.messages!);

  console.log(`✅ Processed ${parseResult.messages!.length} messages for session ${sessionId}`);
}

/**
 * Process all sessions that have transcript content but no messages
 */
export async function processAllUnparsedTranscripts(): Promise<void> {
  console.log('🔍 Finding sessions with unparsed transcripts...');

  // Find sessions that have transcript content but no messages
  const sessionsToProcess = await prisma.session.findMany({
    where: {
      import: {
        rawTranscriptContent: {
          not: null
        }
      },
      messages: {
        none: {}
      }
    },
    include: {
      import: true,
      _count: {
        select: {
          messages: true
        }
      }
    }
  });

  console.log(`📋 Found ${sessionsToProcess.length} sessions to process`);

  let processed = 0;
  let errors = 0;

  for (const session of sessionsToProcess) {
    try {
      await processSessionTranscript(session.id);
      processed++;
    } catch (error) {
      console.error(`❌ Error processing session ${session.id}:`, error);
      errors++;
    }
  }

  console.log(`\n📊 Processing complete:`);
  console.log(`  ✅ Successfully processed: ${processed} sessions`);
  console.log(`  ❌ Errors: ${errors} sessions`);
  console.log(`  📝 Total messages created: ${await getTotalMessageCount()}`);
}

/**
 * Get total count of messages in the database
 */
export async function getTotalMessageCount(): Promise<number> {
  const result = await prisma.message.count();
  return result;
}

/**
 * Get messages for a specific session
 * @param sessionId The session ID
 * @returns Array of messages ordered by order field
 */
export async function getMessagesForSession(sessionId: string) {
  return await prisma.message.findMany({
    where: { sessionId },
    orderBy: { order: 'asc' }
  });
}

/**
 * Get parsing statistics
 */
export async function getParsingStats() {
  const totalSessions = await prisma.session.count();
  const sessionsWithTranscripts = await prisma.session.count({
    where: {
      import: {
        rawTranscriptContent: {
|
||||||
|
not: null
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
const sessionsWithMessages = await prisma.session.count({
|
||||||
|
where: {
|
||||||
|
messages: {
|
||||||
|
some: {}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
const totalMessages = await getTotalMessageCount();
|
||||||
|
|
||||||
|
return {
|
||||||
|
totalSessions,
|
||||||
|
sessionsWithTranscripts,
|
||||||
|
sessionsWithMessages,
|
||||||
|
unparsedSessions: sessionsWithTranscripts - sessionsWithMessages,
|
||||||
|
totalMessages
|
||||||
|
};
|
||||||
|
}
|
||||||
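A minimal sketch (hypothetical, not part of this diff) of how these exported helpers could be driven from a one-off maintenance script; the module path ./lib/transcriptParser is assumed.

import { processAllUnparsedTranscripts, getParsingStats } from "./lib/transcriptParser";

async function main() {
  // Parse and store messages for any sessions that still lack them
  await processAllUnparsedTranscripts();

  // Summarise how much work remains after the run
  const stats = await getParsingStats();
  console.log(`${stats.unparsedSessions} of ${stats.sessionsWithTranscripts} sessions still unparsed`);
}

main().catch((err) => {
  console.error(err);
  process.exit(1);
});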
lib/types.ts (29 lines changed)

@@ -35,6 +35,16 @@ export interface User {
   updatedAt: Date;
 }

+export interface Message {
+  id: string;
+  sessionId: string;
+  timestamp: Date | null;
+  role: string; // "User", "Assistant", "System", etc.
+  content: string;
+  order: number; // Order within the conversation (0, 1, 2, ...)
+  createdAt: Date;
+}
+
 export interface ChatSession {
   id: string;
   sessionId: string;
@@ -44,7 +54,7 @@ export interface ChatSession {
   language?: string | null;
   country?: string | null;
   ipAddress?: string | null;
-  sentiment?: number | null;
+  sentiment?: string | null; // Now a SentimentCategory enum: "POSITIVE", "NEUTRAL", "NEGATIVE"
   messagesSent?: number;
   startTime: Date;
   endTime?: Date | null;
@@ -55,11 +65,11 @@ export interface ChatSession {
   avgResponseTime?: number | null;
   escalated?: boolean;
   forwardedHr?: boolean;
-  tokens?: number;
-  tokensEur?: number;
   initialMsg?: string;
   fullTranscriptUrl?: string | null;
-  transcriptContent?: string | null;
+  summary?: string | null; // Brief summary of the conversation
+  messages?: Message[]; // Parsed messages from transcript
+  transcriptContent?: string | null; // Full transcript content
 }

 export interface SessionQuery {
@@ -105,6 +115,11 @@ export interface WordCloudWord {
   value: number;
 }

+export interface TopQuestion {
+  question: string;
+  count: number;
+}
+
 export interface MetricsResult {
   totalSessions: number;
   avgSessionsPerDay: number;
@@ -139,6 +154,12 @@ export interface MetricsResult {
   avgSessionTimeTrend?: number; // e.g., percentage change in avgSessionLength
   avgResponseTimeTrend?: number; // e.g., percentage change in avgResponseTime
+
+  // New metrics for enhanced dashboard
+  avgDailyCosts?: number; // Average daily costs in euros
+  peakUsageTime?: string; // Peak usage time (e.g., "14:00-15:00")
+  resolvedChatsPercentage?: number; // Percentage of resolved chats
+  topQuestions?: TopQuestion[]; // Top 5 most asked questions
+
   // Debug properties
   totalSessionDuration?: number;
   validSessionsForDuration?: number;
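Illustrative only: a small consumer of the new Message and TopQuestion shapes (import path ./lib/types assumed), showing how the order field and the parsed messages array are intended to be used.

import type { ChatSession, Message, TopQuestion } from "./lib/types";

// Find the most recent user-authored message in a session, if any
function lastUserMessage(session: ChatSession): Message | undefined {
  return (session.messages ?? [])
    .filter((m) => m.role === "User")
    .sort((a, b) => a.order - b.order)
    .at(-1);
}

// Render the top questions list as plain strings, e.g. for a dashboard widget
function formatTopQuestions(questions: TopQuestion[]): string[] {
  return questions.map((q) => `${q.question} (${q.count}x)`);
}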
lib/utils.ts (new file, 6 lines)

@@ -0,0 +1,6 @@
import { clsx, type ClassValue } from "clsx"
import { twMerge } from "tailwind-merge"

export function cn(...inputs: ClassValue[]) {
  return twMerge(clsx(inputs))
}
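For context, a usage sketch of the cn() helper (assuming standard clsx and tailwind-merge behaviour): conditional class values are flattened, and conflicting Tailwind utilities are deduplicated with the later one winning.

import { cn } from "./lib/utils"; // path assumed

const isActive = true;
const className = cn("px-2 py-1", isActive && "bg-blue-500", "px-4");
// => "py-1 bg-blue-500 px-4" (px-2 is dropped in favour of the later px-4)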
migrate-to-refactored-system.ts (new file, 129 lines)

@@ -0,0 +1,129 @@
import { PrismaClient, ProcessingStage, ProcessingStatus } from '@prisma/client';
import { ProcessingStatusManager } from './lib/processingStatusManager';

const prisma = new PrismaClient();

async function migrateToRefactoredSystem() {
  try {
    console.log('=== MIGRATING TO REFACTORED PROCESSING SYSTEM ===\n');

    // Get all existing sessions
    const sessions = await prisma.session.findMany({
      include: {
        import: true,
        messages: true,
        sessionQuestions: true,
      },
      orderBy: { createdAt: 'asc' }
    });

    console.log(`Found ${sessions.length} sessions to migrate...\n`);

    let migratedCount = 0;

    for (const session of sessions) {
      console.log(`Migrating session ${session.import?.externalSessionId || session.id}...`);

      // Initialize processing status for this session
      await ProcessingStatusManager.initializeSession(session.id);

      // Determine the current state of each stage based on existing data

      // 1. CSV_IMPORT - Always completed if session exists
      await ProcessingStatusManager.completeStage(session.id, ProcessingStage.CSV_IMPORT, {
        migratedFrom: 'existing_session',
        importId: session.importId
      });

      // 2. TRANSCRIPT_FETCH - Check if transcript content exists
      if (session.import?.rawTranscriptContent) {
        await ProcessingStatusManager.completeStage(session.id, ProcessingStage.TRANSCRIPT_FETCH, {
          migratedFrom: 'existing_transcript',
          contentLength: session.import.rawTranscriptContent.length
        });
      } else if (!session.import?.fullTranscriptUrl) {
        // No transcript URL - skip this stage
        await ProcessingStatusManager.skipStage(session.id, ProcessingStage.TRANSCRIPT_FETCH, 'No transcript URL in original import');
      } else {
        // Has URL but no content - mark as pending for retry
        console.log(`  - Transcript fetch pending for ${session.import.externalSessionId}`);
      }

      // 3. SESSION_CREATION - Check if messages exist
      if (session.messages.length > 0) {
        await ProcessingStatusManager.completeStage(session.id, ProcessingStage.SESSION_CREATION, {
          migratedFrom: 'existing_messages',
          messageCount: session.messages.length
        });
      } else if (session.import?.rawTranscriptContent) {
        // Has transcript but no messages - needs reprocessing
        console.log(`  - Session creation pending for ${session.import.externalSessionId} (has transcript but no messages)`);
      } else {
        // No transcript content - skip or mark as pending based on transcript fetch status
        if (!session.import?.fullTranscriptUrl) {
          await ProcessingStatusManager.skipStage(session.id, ProcessingStage.SESSION_CREATION, 'No transcript content available');
        }
      }

      // 4. AI_ANALYSIS - Check if AI fields are populated
      const hasAIAnalysis = session.summary || session.sentiment || session.category || session.language;
      if (hasAIAnalysis) {
        await ProcessingStatusManager.completeStage(session.id, ProcessingStage.AI_ANALYSIS, {
          migratedFrom: 'existing_ai_analysis',
          hasSummary: !!session.summary,
          hasSentiment: !!session.sentiment,
          hasCategory: !!session.category,
          hasLanguage: !!session.language
        });
      } else {
        // No AI analysis - mark as pending if session creation is complete
        if (session.messages.length > 0) {
          console.log(`  - AI analysis pending for ${session.import?.externalSessionId}`);
        }
      }

      // 5. QUESTION_EXTRACTION - Check if questions exist
      if (session.sessionQuestions.length > 0) {
        await ProcessingStatusManager.completeStage(session.id, ProcessingStage.QUESTION_EXTRACTION, {
          migratedFrom: 'existing_questions',
          questionCount: session.sessionQuestions.length
        });
      } else {
        // No questions - mark as pending if AI analysis is complete
        if (hasAIAnalysis) {
          console.log(`  - Question extraction pending for ${session.import?.externalSessionId}`);
        }
      }

      migratedCount++;

      if (migratedCount % 10 === 0) {
        console.log(`  Migrated ${migratedCount}/${sessions.length} sessions...`);
      }
    }

    console.log(`\n✓ Successfully migrated ${migratedCount} sessions to the new processing system`);

    // Show final status
    console.log('\n=== MIGRATION COMPLETE - FINAL STATUS ===');
    const pipelineStatus = await ProcessingStatusManager.getPipelineStatus();

    const stages = ['CSV_IMPORT', 'TRANSCRIPT_FETCH', 'SESSION_CREATION', 'AI_ANALYSIS', 'QUESTION_EXTRACTION'];

    for (const stage of stages) {
      const stageData = pipelineStatus.pipeline[stage] || {};
      const pending = stageData.PENDING || 0;
      const completed = stageData.COMPLETED || 0;
      const skipped = stageData.SKIPPED || 0;

      console.log(`${stage}: ${completed} completed, ${pending} pending, ${skipped} skipped`);
    }

  } catch (error) {
    console.error('Error migrating to refactored system:', error);
  } finally {
    await prisma.$disconnect();
  }
}

migrateToRefactoredSystem();
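Under the repository's pnpm convention, this one-off script would presumably be run with something like pnpm tsx migrate-to-refactored-system.ts (tsx is already in devDependencies). Note that it only calls ProcessingStatusManager.initializeSession before backfilling stages, so whether a second run on an already-migrated database is safe depends on how that manager handles re-initialisation; that is worth verifying before re-running it.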
@@ -5,10 +5,8 @@ const nextConfig = {
   reactStrictMode: true,
   // Allow cross-origin requests from specific origins in development
   allowedDevOrigins: [
-    "192.168.1.2",
     "localhost",
-    "propc",
-    "test123.kjanat.com",
+    "127.0.0.1"
   ],
 };
package-lock.json (generated, 9506 lines): diff suppressed because it is too large.
package.json (75 lines changed)

@@ -3,39 +3,71 @@
   "type": "module",
   "version": "0.2.0",
   "private": true,
+  "scripts": {
+    "build": "next build",
+    "dev": "tsx server.ts",
+    "dev:next-only": "next dev --turbopack",
+    "format": "npx prettier --write .",
+    "format:check": "npx prettier --check .",
+    "lint": "next lint",
+    "lint:fix": "npx eslint --fix",
+    "prisma:generate": "prisma generate",
+    "prisma:migrate": "prisma migrate dev",
+    "prisma:seed": "tsx prisma/seed.ts",
+    "prisma:push": "prisma db push",
+    "prisma:push:force": "prisma db push --force-reset",
+    "prisma:studio": "prisma studio",
+    "start": "node server.mjs",
+    "test": "vitest run",
+    "test:watch": "vitest",
+    "test:coverage": "vitest run --coverage",
+    "lint:md": "markdownlint-cli2 \"**/*.md\" \"!.trunk/**\" \"!.venv/**\" \"!node_modules/**\"",
+    "lint:md:fix": "markdownlint-cli2 --fix \"**/*.md\" \"!.trunk/**\" \"!.venv/**\" \"!node_modules/**\""
+  },
   "dependencies": {
-    "@prisma/client": "^6.8.2",
+    "@prisma/adapter-pg": "^6.10.1",
+    "@prisma/client": "^6.10.1",
+    "@radix-ui/react-dropdown-menu": "^2.1.15",
+    "@radix-ui/react-separator": "^1.1.7",
+    "@radix-ui/react-slot": "^1.2.3",
+    "@radix-ui/react-tooltip": "^1.2.7",
     "@rapideditor/country-coder": "^5.4.0",
     "@types/d3": "^7.4.3",
     "@types/d3-cloud": "^1.2.9",
+    "@types/d3-selection": "^3.0.11",
     "@types/geojson": "^7946.0.16",
     "@types/leaflet": "^1.9.18",
     "@types/node-fetch": "^2.6.12",
     "bcryptjs": "^3.0.2",
-    "chart.js": "^4.0.0",
-    "chartjs-plugin-annotation": "^3.1.0",
+    "class-variance-authority": "^0.7.1",
+    "clsx": "^2.1.1",
     "csv-parse": "^5.5.0",
     "d3": "^7.9.0",
     "d3-cloud": "^1.2.7",
+    "d3-selection": "^3.0.0",
     "i18n-iso-countries": "^7.14.0",
     "iso-639-1": "^3.1.5",
     "leaflet": "^1.9.4",
+    "lucide-react": "^0.525.0",
     "next": "^15.3.2",
     "next-auth": "^4.24.11",
     "node-cron": "^4.0.7",
     "node-fetch": "^3.3.2",
     "react": "^19.1.0",
-    "react-chartjs-2": "^5.0.0",
     "react-dom": "^19.1.0",
     "react-leaflet": "^5.0.0",
     "react-markdown": "^10.1.0",
-    "rehype-raw": "^7.0.0"
+    "recharts": "^3.0.2",
+    "rehype-raw": "^7.0.0",
+    "tailwind-merge": "^3.3.1"
   },
   "devDependencies": {
     "@eslint/eslintrc": "^3.3.1",
     "@eslint/js": "^9.27.0",
     "@playwright/test": "^1.52.0",
-    "@tailwindcss/postcss": "^4.1.7",
+    "@tailwindcss/postcss": "^4.1.11",
+    "@testing-library/dom": "^10.4.0",
+    "@testing-library/react": "^16.3.0",
     "@types/bcryptjs": "^2.4.2",
     "@types/node": "^22.15.21",
     "@types/node-cron": "^3.0.8",
@@ -43,32 +75,24 @@
     "@types/react-dom": "^19.1.5",
     "@typescript-eslint/eslint-plugin": "^8.32.1",
     "@typescript-eslint/parser": "^8.32.1",
+    "@vitejs/plugin-react": "^4.6.0",
+    "@vitest/coverage-v8": "^3.2.4",
     "eslint": "^9.27.0",
     "eslint-config-next": "^15.3.2",
     "eslint-plugin-prettier": "^5.4.0",
+    "jsdom": "^26.1.0",
     "markdownlint-cli2": "^0.18.1",
     "postcss": "^8.5.3",
     "prettier": "^3.5.3",
     "prettier-plugin-jinja-template": "^2.1.0",
-    "prisma": "^6.8.2",
-    "tailwindcss": "^4.1.7",
+    "prisma": "^6.10.1",
+    "tailwindcss": "^4.1.11",
     "ts-node": "^10.9.2",
-    "typescript": "^5.0.0"
-  },
-  "scripts": {
-    "build": "next build",
-    "dev": "next dev --turbopack",
-    "format": "npx prettier --write .",
-    "format:check": "npx prettier --check .",
-    "lint": "next lint",
-    "lint:fix": "npx eslint --fix",
-    "prisma:generate": "prisma generate",
-    "prisma:migrate": "prisma migrate dev",
-    "prisma:seed": "node prisma/seed.mjs",
-    "prisma:studio": "prisma studio",
-    "start": "next start",
-    "lint:md": "markdownlint-cli2 \"**/*.md\" \"!.trunk/**\" \"!.venv/**\" \"!node_modules/**\"",
-    "lint:md:fix": "markdownlint-cli2 --fix \"**/*.md\" \"!.trunk/**\" \"!.venv/**\" \"!node_modules/**\""
-  },
+    "tsx": "^4.20.3",
+    "tw-animate-css": "^1.3.4",
+    "typescript": "^5.0.0",
+    "vite-tsconfig-paths": "^5.1.4",
+    "vitest": "^3.2.4"
+  },
   "prettier": {
     "bracketSpacing": true,
@@ -118,5 +142,6 @@
     ".git",
     "*.json"
   ]
-  }
+  },
+  "packageManager": "pnpm@10.12.4"
 }
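With the scripts block and the pinned packageManager in place, day-to-day commands presumably run through pnpm, in line with the repository's pnpm-not-npm rule: pnpm dev now starts the custom server via tsx server.ts (pnpm dev:next-only keeps the plain Turbopack dev server), pnpm test runs Vitest once, and pnpm prisma:migrate maps to prisma migrate dev.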
@@ -1,158 +0,0 @@
// API route to refresh (fetch+parse+update) session data for a company
import { NextApiRequest, NextApiResponse } from "next";
import { fetchAndParseCsv } from "../../../lib/csvFetcher";
import { prisma } from "../../../lib/prisma";

interface SessionCreateData {
  id: string;
  startTime: Date;
  companyId: string;
  sessionId?: string;
  [key: string]: unknown;
}

/**
 * Fetches transcript content from a URL
 * @param url The URL to fetch the transcript from
 * @returns The transcript content or null if fetching fails
 */
async function fetchTranscriptContent(url: string): Promise<string | null> {
  try {
    const response = await fetch(url);
    if (!response.ok) {
      process.stderr.write(
        `Error fetching transcript: ${response.statusText}\n`
      );
      return null;
    }
    return await response.text();
  } catch (error) {
    process.stderr.write(`Failed to fetch transcript: ${error}\n`);
    return null;
  }
}

export default async function handler(
  req: NextApiRequest,
  res: NextApiResponse
) {
  // Check if this is a POST request
  if (req.method !== "POST") {
    return res.status(405).json({ error: "Method not allowed" });
  }

  // Get companyId from body or query
  let { companyId } = req.body;

  if (!companyId) {
    // Try to get user from prisma based on session cookie
    try {
      const session = await prisma.session.findFirst({
        orderBy: { createdAt: "desc" },
        where: {
          /* Add session check criteria here */
        },
      });

      if (session) {
        companyId = session.companyId;
      }
    } catch (error) {
      // Log error for server-side debugging
      const errorMessage =
        error instanceof Error ? error.message : String(error);
      // Use a server-side logging approach instead of console
      process.stderr.write(`Error fetching session: ${errorMessage}\n`);
    }
  }

  if (!companyId) {
    return res.status(400).json({ error: "Company ID is required" });
  }

  const company = await prisma.company.findUnique({ where: { id: companyId } });
  if (!company) return res.status(404).json({ error: "Company not found" });

  try {
    const sessions = await fetchAndParseCsv(
      company.csvUrl,
      company.csvUsername as string | undefined,
      company.csvPassword as string | undefined
    );

    // Replace all session rows for this company (for demo simplicity)
    await prisma.session.deleteMany({ where: { companyId: company.id } });

    for (const session of sessions) {
      const sessionData: SessionCreateData = {
        ...session,
        companyId: company.id,
        id:
          session.id ||
          session.sessionId ||
          `sess_${Date.now()}_${Math.random().toString(36).substring(2, 7)}`,
        // Ensure startTime is not undefined
        startTime: session.startTime || new Date(),
      };

      // Validate dates to prevent "Invalid Date" errors
      const startTime =
        sessionData.startTime instanceof Date &&
        !isNaN(sessionData.startTime.getTime())
          ? sessionData.startTime
          : new Date();

      const endTime =
        session.endTime instanceof Date && !isNaN(session.endTime.getTime())
          ? session.endTime
          : new Date();

      // Fetch transcript content if URL is available
      let transcriptContent: string | null = null;
      if (session.fullTranscriptUrl) {
        transcriptContent = await fetchTranscriptContent(
          session.fullTranscriptUrl
        );
      }

      // Only include fields that are properly typed for Prisma
      await prisma.session.create({
        data: {
          id: sessionData.id,
          companyId: sessionData.companyId,
          startTime: startTime,
          endTime: endTime,
          ipAddress: session.ipAddress || null,
          country: session.country || null,
          language: session.language || null,
          messagesSent:
            typeof session.messagesSent === "number" ? session.messagesSent : 0,
          sentiment:
            typeof session.sentiment === "number" ? session.sentiment : null,
          escalated:
            typeof session.escalated === "boolean" ? session.escalated : null,
          forwardedHr:
            typeof session.forwardedHr === "boolean"
              ? session.forwardedHr
              : null,
          fullTranscriptUrl: session.fullTranscriptUrl || null,
          transcriptContent: transcriptContent, // Add the transcript content
          avgResponseTime:
            typeof session.avgResponseTime === "number"
              ? session.avgResponseTime
              : null,
          tokens: typeof session.tokens === "number" ? session.tokens : null,
          tokensEur:
            typeof session.tokensEur === "number" ? session.tokensEur : null,
          category: session.category || null,
          initialMsg: session.initialMsg || null,
        },
      });
    }

    res.json({ ok: true, imported: sessions.length });
  } catch (e) {
    const error = e instanceof Error ? e.message : "An unknown error occurred";
    res.status(500).json({ error });
  }
}
@@ -1,36 +0,0 @@
// API endpoint: update company CSV URL config
import { NextApiRequest, NextApiResponse } from "next";
import { getServerSession } from "next-auth";
import { prisma } from "../../../lib/prisma";
import { authOptions } from "../auth/[...nextauth]";

export default async function handler(
  req: NextApiRequest,
  res: NextApiResponse
) {
  const session = await getServerSession(req, res, authOptions);
  if (!session?.user) return res.status(401).json({ error: "Not logged in" });

  const user = await prisma.user.findUnique({
    where: { email: session.user.email as string },
  });

  if (!user) return res.status(401).json({ error: "No user" });

  if (req.method === "POST") {
    const { csvUrl } = req.body;
    await prisma.company.update({
      where: { id: user.companyId },
      data: { csvUrl },
    });
    res.json({ ok: true });
  } else if (req.method === "GET") {
    // Get company data
    const company = await prisma.company.findUnique({
      where: { id: user.companyId },
    });
    res.json({ company });
  } else {
    res.status(405).end();
  }
}
@@ -1,83 +0,0 @@
// API endpoint: return metrics for current company
import { NextApiRequest, NextApiResponse } from "next";
import { getServerSession } from "next-auth";
import { prisma } from "../../../lib/prisma";
import { sessionMetrics } from "../../../lib/metrics";
import { authOptions } from "../auth/[...nextauth]";
import { ChatSession } from "../../../lib/types"; // Import ChatSession

interface SessionUser {
  email: string;
  name?: string;
}

interface SessionData {
  user: SessionUser;
}

export default async function handler(
  req: NextApiRequest,
  res: NextApiResponse
) {
  const session = (await getServerSession(
    req,
    res,
    authOptions
  )) as SessionData | null;
  if (!session?.user) return res.status(401).json({ error: "Not logged in" });

  const user = await prisma.user.findUnique({
    where: { email: session.user.email },
    include: { company: true },
  });

  if (!user) return res.status(401).json({ error: "No user" });

  const prismaSessions = await prisma.session.findMany({
    where: { companyId: user.companyId },
  });

  // Convert Prisma sessions to ChatSession[] type for sessionMetrics
  const chatSessions: ChatSession[] = prismaSessions.map((ps) => ({
    id: ps.id, // Map Prisma's id to ChatSession.id
    sessionId: ps.id, // Map Prisma's id to ChatSession.sessionId
    companyId: ps.companyId,
    startTime: new Date(ps.startTime), // Ensure startTime is a Date object
    endTime: ps.endTime ? new Date(ps.endTime) : null, // Ensure endTime is a Date object or null
    transcriptContent: ps.transcriptContent || "", // Ensure transcriptContent is a string
    createdAt: new Date(ps.createdAt), // Map Prisma's createdAt
    updatedAt: new Date(ps.createdAt), // Use createdAt for updatedAt as Session model doesn't have updatedAt
    category: ps.category || undefined,
    language: ps.language || undefined,
    country: ps.country || undefined,
    ipAddress: ps.ipAddress || undefined,
    sentiment: ps.sentiment === null ? undefined : ps.sentiment,
    messagesSent: ps.messagesSent === null ? undefined : ps.messagesSent, // Handle null messagesSent
    avgResponseTime:
      ps.avgResponseTime === null ? undefined : ps.avgResponseTime,
    tokens: ps.tokens === null ? undefined : ps.tokens,
    tokensEur: ps.tokensEur === null ? undefined : ps.tokensEur,
    escalated: ps.escalated || false,
    forwardedHr: ps.forwardedHr || false,
    initialMsg: ps.initialMsg || undefined,
    fullTranscriptUrl: ps.fullTranscriptUrl || undefined,
    // userId is missing in Prisma Session model, assuming it's not strictly needed for metrics or can be null
    userId: undefined, // Or some other default/mapping if available
  }));

  // Pass company config to metrics
  const companyConfigForMetrics = {
    sentimentAlert:
      user.company.sentimentAlert === null
        ? undefined
        : user.company.sentimentAlert,
  };

  const metrics = sessionMetrics(chatSessions, companyConfigForMetrics);

  res.json({
    metrics,
    csvUrl: user.company.csvUrl,
    company: user.company,
  });
}
@@ -1,37 +0,0 @@
import { NextApiRequest, NextApiResponse } from "next";
import { getServerSession } from "next-auth";
import { prisma } from "../../../lib/prisma";
import { authOptions } from "../auth/[...nextauth]";

export default async function handler(
  req: NextApiRequest,
  res: NextApiResponse
) {
  const session = await getServerSession(req, res, authOptions);
  if (!session?.user || session.user.role !== "admin")
    return res.status(403).json({ error: "Forbidden" });

  const user = await prisma.user.findUnique({
    where: { email: session.user.email as string },
  });

  if (!user) return res.status(401).json({ error: "No user" });

  if (req.method === "POST") {
    const { csvUrl, csvUsername, csvPassword, sentimentThreshold } = req.body;
    await prisma.company.update({
      where: { id: user.companyId },
      data: {
        csvUrl,
        csvUsername,
        ...(csvPassword ? { csvPassword } : {}),
        sentimentAlert: sentimentThreshold
          ? parseFloat(sentimentThreshold)
          : null,
      },
    });
    res.json({ ok: true });
  } else {
    res.status(405).end();
  }
}
@@ -1,59 +0,0 @@
import { NextApiRequest, NextApiResponse } from "next";
import crypto from "crypto";
import { getServerSession } from "next-auth";
import { prisma } from "../../../lib/prisma";
import bcrypt from "bcryptjs";
import { authOptions } from "../auth/[...nextauth]";
// User type from prisma is used instead of the one in lib/types

interface UserBasicInfo {
  id: string;
  email: string;
  role: string;
}

export default async function handler(
  req: NextApiRequest,
  res: NextApiResponse
) {
  const session = await getServerSession(req, res, authOptions);
  if (!session?.user || session.user.role !== "admin")
    return res.status(403).json({ error: "Forbidden" });

  const user = await prisma.user.findUnique({
    where: { email: session.user.email as string },
  });

  if (!user) return res.status(401).json({ error: "No user" });

  if (req.method === "GET") {
    const users = await prisma.user.findMany({
      where: { companyId: user.companyId },
    });

    const mappedUsers: UserBasicInfo[] = users.map((u) => ({
      id: u.id,
      email: u.email,
      role: u.role,
    }));

    res.json({ users: mappedUsers });
  } else if (req.method === "POST") {
    const { email, role } = req.body;
    if (!email || !role)
      return res.status(400).json({ error: "Missing fields" });
    const exists = await prisma.user.findUnique({ where: { email } });
    if (exists) return res.status(409).json({ error: "Email exists" });
    const tempPassword = crypto.randomBytes(12).toString("base64").slice(0, 12); // secure random initial password
    await prisma.user.create({
      data: {
        email,
        password: await bcrypt.hash(tempPassword, 10),
        companyId: user.companyId,
        role,
      },
    });
    // TODO: Email user their temp password (stub, for demo) - Implement a robust and secure email sending mechanism. Consider using a transactional email service.
    res.json({ ok: true, tempPassword });
  } else res.status(405).end();
}
@@ -1,31 +0,0 @@
import { prisma } from "../../lib/prisma";
import { sendEmail } from "../../lib/sendEmail";
import crypto from "crypto";
import type { NextApiRequest, NextApiResponse } from "next";

export default async function handler(
  req: NextApiRequest,
  res: NextApiResponse
) {
  if (req.method !== "POST") {
    res.setHeader("Allow", ["POST"]);
    return res.status(405).end(`Method ${req.method} Not Allowed`);
  }

  // Type the body with a type assertion
  const { email } = req.body as { email: string };

  const user = await prisma.user.findUnique({ where: { email } });
  if (!user) return res.status(200).end(); // always 200 for privacy

  const token = crypto.randomBytes(32).toString("hex");
  const expiry = new Date(Date.now() + 1000 * 60 * 30); // 30 min expiry
  await prisma.user.update({
    where: { email },
    data: { resetToken: token, resetTokenExpiry: expiry },
  });

  const resetUrl = `${process.env.NEXTAUTH_URL || "http://localhost:3000"}/reset-password?token=${token}`;
  await sendEmail(email, "Password Reset", `Reset your password: ${resetUrl}`);
  res.status(200).end();
}
@@ -1,56 +0,0 @@
import { NextApiRequest, NextApiResponse } from "next";
import { prisma } from "../../lib/prisma";
import bcrypt from "bcryptjs";
import { ApiResponse } from "../../lib/types";

interface RegisterRequestBody {
  email: string;
  password: string;
  company: string;
  csvUrl?: string;
}

export default async function handler(
  req: NextApiRequest,
  res: NextApiResponse<ApiResponse<{ success: boolean } | { error: string }>>
) {
  if (req.method !== "POST") return res.status(405).end();

  const { email, password, company, csvUrl } = req.body as RegisterRequestBody;

  if (!email || !password || !company) {
    return res.status(400).json({
      success: false,
      error: "Missing required fields",
    });
  }

  // Check if email exists
  const exists = await prisma.user.findUnique({
    where: { email },
  });

  if (exists) {
    return res.status(409).json({
      success: false,
      error: "Email already exists",
    });
  }

  const newCompany = await prisma.company.create({
    data: { name: company, csvUrl: csvUrl || "" },
  });
  const hashed = await bcrypt.hash(password, 10);
  await prisma.user.create({
    data: {
      email,
      password: hashed,
      companyId: newCompany.id,
      role: "admin",
    },
  });
  res.status(201).json({
    success: true,
    data: { success: true },
  });
}
@@ -1,63 +0,0 @@
import { prisma } from "../../lib/prisma";
import bcrypt from "bcryptjs";
import type { NextApiRequest, NextApiResponse } from "next"; // Import official Next.js types

export default async function handler(
  req: NextApiRequest, // Use official NextApiRequest
  res: NextApiResponse // Use official NextApiResponse
) {
  if (req.method !== "POST") {
    res.setHeader("Allow", ["POST"]); // Good practice to set Allow header for 405
    return res.status(405).end(`Method ${req.method} Not Allowed`);
  }

  // It's good practice to explicitly type the expected body for clarity and safety
  const { token, password } = req.body as { token?: string; password?: string };

  if (!token || !password) {
    return res.status(400).json({ error: "Token and password are required." });
  }

  if (password.length < 8) {
    // Example: Add password complexity rule
    return res
      .status(400)
      .json({ error: "Password must be at least 8 characters long." });
  }

  try {
    const user = await prisma.user.findFirst({
      where: {
        resetToken: token,
        resetTokenExpiry: { gte: new Date() },
      },
    });

    if (!user) {
      return res.status(400).json({
        error: "Invalid or expired token. Please request a new password reset.",
      });
    }

    const hash = await bcrypt.hash(password, 10);
    await prisma.user.update({
      where: { id: user.id },
      data: {
        password: hash,
        resetToken: null,
        resetTokenExpiry: null,
      },
    });

    // Instead of just res.status(200).end(), send a success message
    return res
      .status(200)
      .json({ message: "Password has been reset successfully." });
  } catch (error) {
    console.error("Reset password error:", error); // Log the error for server-side debugging
    // Provide a generic error message to the client
    return res.status(500).json({
      error: "An internal server error occurred. Please try again later.",
    });
  }
}
pnpm-lock.yaml (generated, new file, 8994 lines): diff suppressed because it is too large.
@@ -1,50 +0,0 @@
-- CreateTable
CREATE TABLE "Company" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "name" TEXT NOT NULL,
    "csvUrl" TEXT NOT NULL,
    "csvUsername" TEXT,
    "csvPassword" TEXT,
    "sentimentAlert" REAL,
    "dashboardOpts" TEXT,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL
);

-- CreateTable
CREATE TABLE "User" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "email" TEXT NOT NULL,
    "password" TEXT NOT NULL,
    "companyId" TEXT NOT NULL,
    "role" TEXT NOT NULL,
    "resetToken" TEXT,
    "resetTokenExpiry" DATETIME,
    CONSTRAINT "User_companyId_fkey" FOREIGN KEY ("companyId") REFERENCES "Company" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);

-- CreateTable
CREATE TABLE "Session" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "companyId" TEXT NOT NULL,
    "startTime" DATETIME NOT NULL,
    "endTime" DATETIME NOT NULL,
    "ipAddress" TEXT,
    "country" TEXT,
    "language" TEXT,
    "messagesSent" INTEGER,
    "sentiment" REAL,
    "escalated" BOOLEAN,
    "forwardedHr" BOOLEAN,
    "fullTranscriptUrl" TEXT,
    "avgResponseTime" REAL,
    "tokens" INTEGER,
    "tokensEur" REAL,
    "category" TEXT,
    "initialMsg" TEXT,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    CONSTRAINT "Session_companyId_fkey" FOREIGN KEY ("companyId") REFERENCES "Company" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);

-- CreateIndex
CREATE UNIQUE INDEX "User_email_key" ON "User"("email");
@@ -1,2 +0,0 @@
-- AlterTable
ALTER TABLE "Session" ADD COLUMN "transcriptContent" TEXT;
@@ -0,0 +1,227 @@
-- CreateEnum
CREATE TYPE "UserRole" AS ENUM ('ADMIN', 'USER', 'AUDITOR');

-- CreateEnum
CREATE TYPE "SentimentCategory" AS ENUM ('POSITIVE', 'NEUTRAL', 'NEGATIVE');

-- CreateEnum
CREATE TYPE "SessionCategory" AS ENUM ('SCHEDULE_HOURS', 'LEAVE_VACATION', 'SICK_LEAVE_RECOVERY', 'SALARY_COMPENSATION', 'CONTRACT_HOURS', 'ONBOARDING', 'OFFBOARDING', 'WORKWEAR_STAFF_PASS', 'TEAM_CONTACTS', 'PERSONAL_QUESTIONS', 'ACCESS_LOGIN', 'SOCIAL_QUESTIONS', 'UNRECOGNIZED_OTHER');

-- CreateEnum
CREATE TYPE "ImportStatus" AS ENUM ('QUEUED', 'PROCESSING', 'DONE', 'ERROR');

-- CreateTable
CREATE TABLE "Company" (
    "id" TEXT NOT NULL,
    "name" TEXT NOT NULL,
    "csvUrl" TEXT NOT NULL,
    "csvUsername" TEXT,
    "csvPassword" TEXT,
    "sentimentAlert" DOUBLE PRECISION,
    "dashboardOpts" JSONB,
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" TIMESTAMP(3) NOT NULL,

    CONSTRAINT "Company_pkey" PRIMARY KEY ("id")
);

-- CreateTable
CREATE TABLE "User" (
    "id" TEXT NOT NULL,
    "email" TEXT NOT NULL,
    "password" TEXT NOT NULL,
    "role" "UserRole" NOT NULL DEFAULT 'USER',
    "companyId" TEXT NOT NULL,
    "resetToken" TEXT,
    "resetTokenExpiry" TIMESTAMP(3),
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" TIMESTAMP(3) NOT NULL,

    CONSTRAINT "User_pkey" PRIMARY KEY ("id")
);

-- CreateTable
CREATE TABLE "Session" (
    "id" TEXT NOT NULL,
    "companyId" TEXT NOT NULL,
    "importId" TEXT,
    "startTime" TIMESTAMP(3) NOT NULL,
    "endTime" TIMESTAMP(3) NOT NULL,
    "ipAddress" TEXT,
    "country" TEXT,
    "fullTranscriptUrl" TEXT,
    "avgResponseTime" DOUBLE PRECISION,
    "initialMsg" TEXT,
    "language" TEXT,
    "messagesSent" INTEGER,
    "sentiment" "SentimentCategory",
    "escalated" BOOLEAN,
    "forwardedHr" BOOLEAN,
    "category" "SessionCategory",
    "summary" TEXT,
    "processed" BOOLEAN NOT NULL DEFAULT false,
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" TIMESTAMP(3) NOT NULL,

    CONSTRAINT "Session_pkey" PRIMARY KEY ("id")
);

-- CreateTable
CREATE TABLE "SessionImport" (
    "id" TEXT NOT NULL,
    "companyId" TEXT NOT NULL,
    "externalSessionId" TEXT NOT NULL,
    "startTimeRaw" TEXT NOT NULL,
    "endTimeRaw" TEXT NOT NULL,
    "ipAddress" TEXT,
    "countryCode" TEXT,
    "language" TEXT,
    "messagesSent" INTEGER,
    "sentimentRaw" TEXT,
    "escalatedRaw" TEXT,
    "forwardedHrRaw" TEXT,
    "fullTranscriptUrl" TEXT,
    "avgResponseTimeSeconds" DOUBLE PRECISION,
    "tokens" INTEGER,
    "tokensEur" DOUBLE PRECISION,
    "category" TEXT,
    "initialMessage" TEXT,
    "rawTranscriptContent" TEXT,
    "status" "ImportStatus" NOT NULL DEFAULT 'QUEUED',
    "errorMsg" TEXT,
    "processedAt" TIMESTAMP(3),
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,

    CONSTRAINT "SessionImport_pkey" PRIMARY KEY ("id")
);

-- CreateTable
CREATE TABLE "Message" (
    "id" TEXT NOT NULL,
    "sessionId" TEXT NOT NULL,
    "timestamp" TIMESTAMP(3),
    "role" TEXT NOT NULL,
    "content" TEXT NOT NULL,
    "order" INTEGER NOT NULL,
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,

    CONSTRAINT "Message_pkey" PRIMARY KEY ("id")
);

-- CreateTable
CREATE TABLE "Question" (
    "id" TEXT NOT NULL,
    "content" TEXT NOT NULL,
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,

    CONSTRAINT "Question_pkey" PRIMARY KEY ("id")
);

-- CreateTable
CREATE TABLE "SessionQuestion" (
    "id" TEXT NOT NULL,
    "sessionId" TEXT NOT NULL,
    "questionId" TEXT NOT NULL,
    "order" INTEGER NOT NULL,
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,

    CONSTRAINT "SessionQuestion_pkey" PRIMARY KEY ("id")
);

-- CreateTable
CREATE TABLE "AIProcessingRequest" (
    "id" TEXT NOT NULL,
    "sessionId" TEXT NOT NULL,
    "openaiRequestId" TEXT,
    "model" TEXT NOT NULL,
    "serviceTier" TEXT,
    "systemFingerprint" TEXT,
    "promptTokens" INTEGER NOT NULL,
    "completionTokens" INTEGER NOT NULL,
    "totalTokens" INTEGER NOT NULL,
    "cachedTokens" INTEGER,
    "audioTokensPrompt" INTEGER,
    "reasoningTokens" INTEGER,
    "audioTokensCompletion" INTEGER,
    "acceptedPredictionTokens" INTEGER,
    "rejectedPredictionTokens" INTEGER,
    "promptTokenCost" DOUBLE PRECISION NOT NULL,
    "completionTokenCost" DOUBLE PRECISION NOT NULL,
    "totalCostEur" DOUBLE PRECISION NOT NULL,
    "processingType" TEXT NOT NULL,
    "success" BOOLEAN NOT NULL,
    "errorMessage" TEXT,
    "requestedAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "completedAt" TIMESTAMP(3),

    CONSTRAINT "AIProcessingRequest_pkey" PRIMARY KEY ("id")
);

-- CreateIndex
CREATE UNIQUE INDEX "User_email_key" ON "User"("email");

-- CreateIndex
CREATE UNIQUE INDEX "Session_importId_key" ON "Session"("importId");

-- CreateIndex
CREATE INDEX "Session_companyId_startTime_idx" ON "Session"("companyId", "startTime");

-- CreateIndex
CREATE UNIQUE INDEX "SessionImport_externalSessionId_key" ON "SessionImport"("externalSessionId");

-- CreateIndex
CREATE INDEX "SessionImport_status_idx" ON "SessionImport"("status");

-- CreateIndex
CREATE UNIQUE INDEX "SessionImport_companyId_externalSessionId_key" ON "SessionImport"("companyId", "externalSessionId");

-- CreateIndex
CREATE INDEX "Message_sessionId_order_idx" ON "Message"("sessionId", "order");

-- CreateIndex
CREATE UNIQUE INDEX "Message_sessionId_order_key" ON "Message"("sessionId", "order");

-- CreateIndex
CREATE UNIQUE INDEX "Question_content_key" ON "Question"("content");

-- CreateIndex
CREATE INDEX "SessionQuestion_sessionId_idx" ON "SessionQuestion"("sessionId");

-- CreateIndex
CREATE UNIQUE INDEX "SessionQuestion_sessionId_questionId_key" ON "SessionQuestion"("sessionId", "questionId");

-- CreateIndex
CREATE UNIQUE INDEX "SessionQuestion_sessionId_order_key" ON "SessionQuestion"("sessionId", "order");

-- CreateIndex
CREATE INDEX "AIProcessingRequest_sessionId_idx" ON "AIProcessingRequest"("sessionId");

-- CreateIndex
CREATE INDEX "AIProcessingRequest_requestedAt_idx" ON "AIProcessingRequest"("requestedAt");

-- CreateIndex
CREATE INDEX "AIProcessingRequest_model_idx" ON "AIProcessingRequest"("model");

-- AddForeignKey
ALTER TABLE "User" ADD CONSTRAINT "User_companyId_fkey" FOREIGN KEY ("companyId") REFERENCES "Company"("id") ON DELETE CASCADE ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "Session" ADD CONSTRAINT "Session_companyId_fkey" FOREIGN KEY ("companyId") REFERENCES "Company"("id") ON DELETE CASCADE ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "Session" ADD CONSTRAINT "Session_importId_fkey" FOREIGN KEY ("importId") REFERENCES "SessionImport"("id") ON DELETE SET NULL ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "SessionImport" ADD CONSTRAINT "SessionImport_companyId_fkey" FOREIGN KEY ("companyId") REFERENCES "Company"("id") ON DELETE CASCADE ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "Message" ADD CONSTRAINT "Message_sessionId_fkey" FOREIGN KEY ("sessionId") REFERENCES "Session"("id") ON DELETE CASCADE ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "SessionQuestion" ADD CONSTRAINT "SessionQuestion_sessionId_fkey" FOREIGN KEY ("sessionId") REFERENCES "Session"("id") ON DELETE CASCADE ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "SessionQuestion" ADD CONSTRAINT "SessionQuestion_questionId_fkey" FOREIGN KEY ("questionId") REFERENCES "Question"("id") ON DELETE RESTRICT ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "AIProcessingRequest" ADD CONSTRAINT "AIProcessingRequest_sessionId_fkey" FOREIGN KEY ("sessionId") REFERENCES "Session"("id") ON DELETE CASCADE ON UPDATE CASCADE;
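A small illustrative sketch (not part of this diff) of writing rows against the Message table created above: the unique (sessionId, "order") index means bulk inserts should either compute fresh order values or tolerate duplicates, and Prisma's createMany with skipDuplicates (supported on PostgreSQL) makes re-runs idempotent. The Message model is assumed to generate its own id, as the existing storeMessagesForSession code relies on.

import { PrismaClient } from "@prisma/client";

const prisma = new PrismaClient();

async function insertMessages(
  sessionId: string,
  contents: { role: string; content: string }[]
) {
  await prisma.message.createMany({
    data: contents.map((m, i) => ({ ...m, sessionId, order: i })), // order = position in the conversation
    skipDuplicates: true, // ignore rows that would violate the unique (sessionId, order) index
  });
}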
@@ -0,0 +1,63 @@
-- CreateTable
CREATE TABLE "AIModel" (
    "id" TEXT NOT NULL,
    "name" TEXT NOT NULL,
    "provider" TEXT NOT NULL,
    "maxTokens" INTEGER,
    "isActive" BOOLEAN NOT NULL DEFAULT true,
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" TIMESTAMP(3) NOT NULL,

    CONSTRAINT "AIModel_pkey" PRIMARY KEY ("id")
);

-- CreateTable
CREATE TABLE "AIModelPricing" (
    "id" TEXT NOT NULL,
    "aiModelId" TEXT NOT NULL,
    "promptTokenCost" DOUBLE PRECISION NOT NULL,
    "completionTokenCost" DOUBLE PRECISION NOT NULL,
    "effectiveFrom" TIMESTAMP(3) NOT NULL,
    "effectiveUntil" TIMESTAMP(3),
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,

    CONSTRAINT "AIModelPricing_pkey" PRIMARY KEY ("id")
);

-- CreateTable
CREATE TABLE "CompanyAIModel" (
    "id" TEXT NOT NULL,
    "companyId" TEXT NOT NULL,
    "aiModelId" TEXT NOT NULL,
    "isDefault" BOOLEAN NOT NULL DEFAULT false,
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,

    CONSTRAINT "CompanyAIModel_pkey" PRIMARY KEY ("id")
);

-- CreateIndex
CREATE UNIQUE INDEX "AIModel_name_key" ON "AIModel"("name");

-- CreateIndex
CREATE INDEX "AIModel_provider_isActive_idx" ON "AIModel"("provider", "isActive");

-- CreateIndex
CREATE INDEX "AIModelPricing_aiModelId_effectiveFrom_idx" ON "AIModelPricing"("aiModelId", "effectiveFrom");

-- CreateIndex
CREATE INDEX "AIModelPricing_effectiveFrom_effectiveUntil_idx" ON "AIModelPricing"("effectiveFrom", "effectiveUntil");

-- CreateIndex
CREATE INDEX "CompanyAIModel_companyId_isDefault_idx" ON "CompanyAIModel"("companyId", "isDefault");

-- CreateIndex
CREATE UNIQUE INDEX "CompanyAIModel_companyId_aiModelId_key" ON "CompanyAIModel"("companyId", "aiModelId");

-- AddForeignKey
ALTER TABLE "AIModelPricing" ADD CONSTRAINT "AIModelPricing_aiModelId_fkey" FOREIGN KEY ("aiModelId") REFERENCES "AIModel"("id") ON DELETE CASCADE ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "CompanyAIModel" ADD CONSTRAINT "CompanyAIModel_companyId_fkey" FOREIGN KEY ("companyId") REFERENCES "Company"("id") ON DELETE CASCADE ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "CompanyAIModel" ADD CONSTRAINT "CompanyAIModel_aiModelId_fkey" FOREIGN KEY ("aiModelId") REFERENCES "AIModel"("id") ON DELETE CASCADE ON UPDATE CASCADE;
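Because AIModelPricing rows carry an effectiveFrom/effectiveUntil window rather than being overwritten, cost lookups have to pick the row in force at a given time. A hypothetical helper (assuming the generated Prisma client exposes the model as aIModelPricing, per Prisma's default naming) could look like this:

import { PrismaClient } from "@prisma/client";

const prisma = new PrismaClient();

// Return the pricing row that applies to a model at time `at`, or null if none is configured.
async function pricingAt(aiModelId: string, at: Date) {
  return prisma.aIModelPricing.findFirst({
    where: {
      aiModelId,
      effectiveFrom: { lte: at },
      OR: [{ effectiveUntil: null }, { effectiveUntil: { gte: at } }],
    },
    orderBy: { effectiveFrom: "desc" }, // most recent applicable price wins
  });
}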
@@ -1,3 +1,3 @@
 # Please do not edit this file manually
 # It should be added in your version-control system (e.g., Git)
-provider = "sqlite"
+provider = "postgresql"
prisma/schema.prisma
@@ -1,57 +1,376 @@
// Database schema, one company = one org, linked to users and CSV config

generator client {
  provider        = "prisma-client-js"
  previewFeatures = ["driverAdapters"]
}

datasource db {
  provider  = "postgresql"
  url       = env("DATABASE_URL")
  directUrl = env("DATABASE_URL_DIRECT")
}

/**
 * ENUMS – fewer magic strings
 */
enum UserRole {
  ADMIN
  USER
  AUDITOR
}

enum SentimentCategory {
  POSITIVE
  NEUTRAL
  NEGATIVE
}

enum SessionCategory {
  SCHEDULE_HOURS
  LEAVE_VACATION
  SICK_LEAVE_RECOVERY
  SALARY_COMPENSATION
  CONTRACT_HOURS
  ONBOARDING
  OFFBOARDING
  WORKWEAR_STAFF_PASS
  TEAM_CONTACTS
  PERSONAL_QUESTIONS
  ACCESS_LOGIN
  SOCIAL_QUESTIONS
  UNRECOGNIZED_OTHER
}

enum ProcessingStage {
  CSV_IMPORT          // SessionImport created
  TRANSCRIPT_FETCH    // Transcript content fetched
  SESSION_CREATION    // Session + Messages created
  AI_ANALYSIS         // AI processing completed
  QUESTION_EXTRACTION // Questions extracted
}

enum ProcessingStatus {
  PENDING
  IN_PROGRESS
  COMPLETED
  FAILED
  SKIPPED
}

/**
 * COMPANY (multi-tenant root)
 */
model Company {
  id             String  @id @default(uuid())
  name           String
  csvUrl         String
  csvUsername    String?
  csvPassword    String?
  sentimentAlert Float?
  dashboardOpts  Json?   // JSON column instead of opaque string

  users           User[]           @relation("CompanyUsers")
  sessions        Session[]
  imports         SessionImport[]
  companyAiModels CompanyAIModel[]

  createdAt DateTime @default(now())
  updatedAt DateTime @updatedAt
}

/**
 * USER (auth accounts)
 */
model User {
  id       String   @id @default(uuid())
  email    String   @unique
  password String
  role     UserRole @default(USER)

  company   Company @relation("CompanyUsers", fields: [companyId], references: [id], onDelete: Cascade)
  companyId String

  resetToken       String?
  resetTokenExpiry DateTime?

  createdAt DateTime @default(now())
  updatedAt DateTime @updatedAt
}

/**
 * SESSION ↔ SESSIONIMPORT (1-to-1)
 */

/**
 * 1. Normalised session ---------------------------
 */
model Session {
  id        String  @id @default(uuid())
  company   Company @relation(fields: [companyId], references: [id], onDelete: Cascade)
  companyId String

  /**
   * 1-to-1 link back to the import row
   */
  import   SessionImport? @relation("ImportToSession", fields: [importId], references: [id])
  importId String?        @unique

  /**
   * session-level data (processed from SessionImport)
   */
  startTime DateTime
  endTime   DateTime

  // Direct copies from SessionImport (minimal processing)
  ipAddress         String?
  country           String? // from countryCode
  fullTranscriptUrl String?
  avgResponseTime   Float?  // from avgResponseTimeSeconds
  initialMsg        String? // from initialMessage

  // AI-processed fields (calculated from Messages or AI analysis)
  language     String?            // AI-detected from Messages
  messagesSent Int?               // Calculated from Message count
  sentiment    SentimentCategory? // AI-analyzed (changed from Float to enum)
  escalated    Boolean?           // AI-detected
  forwardedHr  Boolean?           // AI-detected
  category     SessionCategory?   // AI-categorized (changed to enum)

  // AI-generated fields
  summary String? // AI-generated summary

  /**
   * Relationships
   */
  messages             Message[]                 // Individual conversation messages
  sessionQuestions     SessionQuestion[]         // Questions asked in this session
  aiProcessingRequests AIProcessingRequest[]     // AI processing cost tracking
  processingStatus     SessionProcessingStatus[] // Processing pipeline status

  createdAt DateTime @default(now())
  updatedAt DateTime @updatedAt

  @@index([companyId, startTime])
}

/**
 * 2. Raw CSV row (pure data storage) ----------
 */
model SessionImport {
  id        String  @id @default(uuid())
  company   Company @relation(fields: [companyId], references: [id], onDelete: Cascade)
  companyId String

  /**
   * 1-to-1 back-relation; NO fields/references here
   */
  session Session? @relation("ImportToSession")

  // ─── 16 CSV columns 1-to-1 ────────────────────────
  externalSessionId      String  @unique // value from CSV column 1
  startTimeRaw           String
  endTimeRaw             String
  ipAddress              String?
  countryCode            String?
  language               String?
  messagesSent           Int?
  sentimentRaw           String?
  escalatedRaw           String?
  forwardedHrRaw         String?
  fullTranscriptUrl      String?
  avgResponseTimeSeconds Float?
  tokens                 Int?
  tokensEur              Float?
  category               String?
  initialMessage         String?

  // ─── Raw transcript content ─────────────────────────
  rawTranscriptContent String? // Fetched content from fullTranscriptUrl

  // ─── bookkeeping ─────────────────────────────────
  createdAt DateTime @default(now())

  @@unique([companyId, externalSessionId]) // idempotent re-imports
}

/**
 * MESSAGE (individual lines)
 */
model Message {
  id String @id @default(uuid())

  session   Session @relation(fields: [sessionId], references: [id], onDelete: Cascade)
  sessionId String

  timestamp DateTime?
  role      String // "user" | "assistant" | "system" – free-form keeps migration easy
  content   String
  order     Int

  createdAt DateTime @default(now())

  @@unique([sessionId, order]) // guards against duplicate order values
  @@index([sessionId, order])
}

/**
 * UNIFIED PROCESSING STATUS TRACKING
 */
model SessionProcessingStatus {
  id        String           @id @default(uuid())
  sessionId String
  stage     ProcessingStage
  status    ProcessingStatus @default(PENDING)

  startedAt    DateTime?
  completedAt  DateTime?
  errorMessage String?
  retryCount   Int       @default(0)

  // Stage-specific metadata (e.g., AI costs, token usage, fetch details)
  metadata Json?

  session Session @relation(fields: [sessionId], references: [id], onDelete: Cascade)

  @@unique([sessionId, stage])
  @@index([stage, status])
  @@index([sessionId])
}

/**
 * QUESTION MANAGEMENT (separate from Session for better analytics)
 */
model Question {
  id        String   @id @default(uuid())
  content   String   @unique // The actual question text
  createdAt DateTime @default(now())

  // Relationships
  sessionQuestions SessionQuestion[]
}

model SessionQuestion {
  id         String   @id @default(uuid())
  sessionId  String
  questionId String
  order      Int      // Order within the session
  createdAt  DateTime @default(now())

  // Relationships
  session  Session  @relation(fields: [sessionId], references: [id], onDelete: Cascade)
  question Question @relation(fields: [questionId], references: [id])

  @@unique([sessionId, questionId]) // Prevent duplicate questions per session
  @@unique([sessionId, order])      // Ensure unique ordering
  @@index([sessionId])
}

/**
 * AI PROCESSING COST TRACKING
 */
model AIProcessingRequest {
  id        String @id @default(uuid())
  sessionId String

  // OpenAI Request Details
  openaiRequestId   String? // "chatcmpl-Bn8IH9UM8t7luZVWnwZG7CVJ0kjPo"
  model             String  // "gpt-4o-2024-08-06"
  serviceTier       String? // "default"
  systemFingerprint String? // "fp_07871e2ad8"

  // Token Usage (from usage object)
  promptTokens     Int // 11
  completionTokens Int // 9
  totalTokens      Int // 20

  // Detailed Token Breakdown
  cachedTokens             Int? // prompt_tokens_details.cached_tokens
  audioTokensPrompt        Int? // prompt_tokens_details.audio_tokens
  reasoningTokens          Int? // completion_tokens_details.reasoning_tokens
  audioTokensCompletion    Int? // completion_tokens_details.audio_tokens
  acceptedPredictionTokens Int? // completion_tokens_details.accepted_prediction_tokens
  rejectedPredictionTokens Int? // completion_tokens_details.rejected_prediction_tokens

  // Cost Calculation
  promptTokenCost     Float // Cost per prompt token (varies by model)
  completionTokenCost Float // Cost per completion token (varies by model)
  totalCostEur        Float // Calculated total cost in EUR

  // Processing Context
  processingType String  // "session_analysis", "reprocessing", etc.
  success        Boolean // Whether the request succeeded
  errorMessage   String? // If failed, what went wrong

  // Timestamps
  requestedAt DateTime  @default(now())
  completedAt DateTime?

  // Relationships
  session Session @relation(fields: [sessionId], references: [id], onDelete: Cascade)

  @@index([sessionId])
  @@index([requestedAt])
  @@index([model])
}

/**
 * AI MODEL MANAGEMENT SYSTEM
 */

/**
 * AI Model definitions (without pricing)
 */
model AIModel {
  id        String  @id @default(uuid())
  name      String  @unique // "gpt-4o", "gpt-4-turbo", etc.
  provider  String  // "openai", "anthropic", etc.
  maxTokens Int?    // Maximum tokens for this model
  isActive  Boolean @default(true)

  // Relationships
  pricing       AIModelPricing[]
  companyModels CompanyAIModel[]

  createdAt DateTime @default(now())
  updatedAt DateTime @updatedAt

  @@index([provider, isActive])
}

/**
 * Time-based pricing for AI models
 */
model AIModelPricing {
  id                  String    @id @default(uuid())
  aiModelId           String
  promptTokenCost     Float     // Cost per prompt token in USD
  completionTokenCost Float     // Cost per completion token in USD
  effectiveFrom       DateTime  // When this pricing becomes effective
  effectiveUntil      DateTime? // When this pricing expires (null = current)

  // Relationships
  aiModel AIModel @relation(fields: [aiModelId], references: [id], onDelete: Cascade)

  createdAt DateTime @default(now())

  @@index([aiModelId, effectiveFrom])
  @@index([effectiveFrom, effectiveUntil])
}

/**
 * Company-specific AI model assignments
 */
model CompanyAIModel {
  id        String  @id @default(uuid())
  companyId String
  aiModelId String
  isDefault Boolean @default(false) // Is this the default model for the company?

  // Relationships
  company Company @relation(fields: [companyId], references: [id], onDelete: Cascade)
  aiModel AIModel @relation(fields: [aiModelId], references: [id], onDelete: Cascade)

  createdAt DateTime @default(now())

  @@unique([companyId, aiModelId]) // Prevent duplicate assignments
  @@index([companyId, isDefault])
}
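The SessionProcessingStatus model tracks each pipeline stage per session, keyed by the compound unique on [sessionId, stage]. A rough sketch of how a stage transition could be recorded with the generated Prisma client; the function name markStageCompleted is an illustration, not code from this repository:

import { PrismaClient, ProcessingStage, ProcessingStatus } from "@prisma/client";

const prisma = new PrismaClient();

// Illustrative: mark one pipeline stage of a session as completed,
// creating the status row on first touch (unique on [sessionId, stage]).
async function markStageCompleted(sessionId: string, stage: ProcessingStage) {
  return prisma.sessionProcessingStatus.upsert({
    where: { sessionId_stage: { sessionId, stage } },
    update: { status: ProcessingStatus.COMPLETED, completedAt: new Date() },
    create: {
      sessionId,
      stage,
      status: ProcessingStatus.COMPLETED,
      startedAt: new Date(),
      completedAt: new Date(),
    },
  });
}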
@@ -1,39 +0,0 @@
// seed.js - Create initial admin user and company
import { PrismaClient } from "@prisma/client";
import bcrypt from "bcryptjs";

const prisma = new PrismaClient();

async function main() {
  // Create a company
  const company = await prisma.company.create({
    data: {
      name: "Demo Company",
      csvUrl: "https://example.com/data.csv", // Replace with a real URL if available
    },
  });

  // Create an admin user
  const hashedPassword = await bcrypt.hash("admin123", 10);
  await prisma.user.create({
    data: {
      email: "admin@demo.com",
      password: hashedPassword,
      role: "admin",
      companyId: company.id,
    },
  });

  console.log("Seed data created successfully:");
  console.log("Company: Demo Company");
  console.log("Admin user: admin@demo.com (password: admin123)");
}

main()
  .catch((e) => {
    console.error("Error seeding database:", e);
    process.exit(1);
  })
  .finally(async () => {
    await prisma.$disconnect();
  });
@@ -1,39 +0,0 @@
// Seed script for creating initial data
import { PrismaClient } from "@prisma/client";
import bcrypt from "bcryptjs";

const prisma = new PrismaClient();

async function main() {
  try {
    // Create a company
    const company = await prisma.company.create({
      data: {
        name: "Demo Company",
        csvUrl: "https://example.com/data.csv", // Replace with a real URL if available
      },
    });

    // Create an admin user
    const hashedPassword = await bcrypt.hash("admin123", 10);
    await prisma.user.create({
      data: {
        email: "admin@demo.com",
        password: hashedPassword,
        role: "admin",
        companyId: company.id,
      },
    });

    console.log("Seed data created successfully:");
    console.log("Company: Demo Company");
    console.log("Admin user: admin@demo.com (password: admin123)");
  } catch (error) {
    console.error("Error seeding database:", error);
    process.exit(1);
  } finally {
    await prisma.$disconnect();
  }
}

main();
129 prisma/seed.ts
@@ -1,4 +1,4 @@
// seed.ts - Create initial admin user, company, and AI models
import { PrismaClient } from "@prisma/client";
import bcrypt from "bcryptjs";

@@ -6,30 +6,133 @@ const prisma = new PrismaClient();

async function main() {
  try {
    console.log("🌱 Starting database seeding...");

    // Create the Jumbo company
    const company = await prisma.company.create({
      data: {
        name: "Jumbo Bas Bobbeldijk",
        csvUrl: "https://proto.notso.ai/jumbo/chats",
        csvUsername: "jumboadmin",
        csvPassword: "jumboadmin",
      },
    });
    console.log(`✅ Created company: ${company.name}`);

    // Create admin user
    const hashedPassword = await bcrypt.hash("8QbL26tB7fWS", 10);
    const adminUser = await prisma.user.create({
      data: {
        email: "max.kowalski.contact@gmail.com",
        password: hashedPassword,
        role: "ADMIN",
        companyId: company.id,
      },
    });
    console.log(`✅ Created admin user: ${adminUser.email}`);

    // Create AI Models
    const aiModels = [
      { name: "gpt-4o", provider: "openai", maxTokens: 128000, isActive: true },
      { name: "gpt-4o-2024-08-06", provider: "openai", maxTokens: 128000, isActive: true },
      { name: "gpt-4-turbo", provider: "openai", maxTokens: 128000, isActive: true },
      { name: "gpt-4o-mini", provider: "openai", maxTokens: 128000, isActive: true },
    ];

    const createdModels: any[] = [];
    for (const modelData of aiModels) {
      const model = await prisma.aIModel.create({
        data: modelData,
      });
      createdModels.push(model);
      console.log(`✅ Created AI model: ${model.name}`);
    }

    // Create current pricing for AI models (as of December 2024)
    const currentTime = new Date();
    const pricingData = [
      {
        modelName: "gpt-4o",
        promptTokenCost: 0.0000025, // $2.50 per 1M tokens
        completionTokenCost: 0.00001, // $10.00 per 1M tokens
      },
      {
        modelName: "gpt-4o-2024-08-06",
        promptTokenCost: 0.0000025, // $2.50 per 1M tokens
        completionTokenCost: 0.00001, // $10.00 per 1M tokens
      },
      {
        modelName: "gpt-4-turbo",
        promptTokenCost: 0.00001, // $10.00 per 1M tokens
        completionTokenCost: 0.00003, // $30.00 per 1M tokens
      },
      {
        modelName: "gpt-4o-mini",
        promptTokenCost: 0.00000015, // $0.15 per 1M tokens
        completionTokenCost: 0.0000006, // $0.60 per 1M tokens
      },
    ];

    for (const pricing of pricingData) {
      const model = createdModels.find(m => m.name === pricing.modelName);
      if (model) {
        await prisma.aIModelPricing.create({
          data: {
            aiModelId: model.id,
            promptTokenCost: pricing.promptTokenCost,
            completionTokenCost: pricing.completionTokenCost,
            effectiveFrom: currentTime,
            effectiveUntil: null, // Current pricing
          },
        });
        console.log(`✅ Created pricing for: ${model.name}`);
      }
    }

    // Assign default AI model to company (gpt-4o)
    const defaultModel = createdModels.find(m => m.name === "gpt-4o");
    if (defaultModel) {
      await prisma.companyAIModel.create({
        data: {
          companyId: company.id,
          aiModelId: defaultModel.id,
          isDefault: true,
        },
      });
      console.log(`✅ Set default AI model for company: ${defaultModel.name}`);
    }

    console.log("\n🎉 Database seeding completed successfully!");
    console.log("\n📋 Summary:");
    console.log(`Company: ${company.name}`);
    console.log(`Admin user: ${adminUser.email}`);
    console.log(`Password: 8QbL26tB7fWS`);
    console.log(`AI Models: ${createdModels.length} models created with current pricing`);
    console.log(`Default model: ${defaultModel?.name}`);
    console.log("\n🚀 Ready to start importing CSV data!");
  } catch (error) {
    console.error("❌ Error seeding database:", error);
    process.exit(1);
  } finally {
    await prisma.$disconnect();
  }
}
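With the seeded per-token prices, the cost of a single request follows directly from its token usage: promptTokens × promptTokenCost + completionTokens × completionTokenCost. A short sketch using the gpt-4o values above; the helper name and the example token counts are illustrative only (note that AIProcessingRequest stores totalCostEur, so a USD-to-EUR conversion, not shown here, would still be needed):

// Illustrative cost calculation matching the seeded AIModelPricing values.
function requestCostUsd(
  promptTokens: number,
  completionTokens: number,
  promptTokenCost: number,     // e.g. 0.0000025 for gpt-4o
  completionTokenCost: number  // e.g. 0.00001 for gpt-4o
): number {
  return promptTokens * promptTokenCost + completionTokens * completionTokenCost;
}

// 6,470 prompt tokens + 500 completion tokens on gpt-4o ≈ $0.0212
console.log(requestCostUsd(6470, 500, 0.0000025, 0.00001).toFixed(4));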
6 sample-csv-import-file.csv (new file)
@@ -0,0 +1,6 @@
a068d62a-439b-4d70-924f-3d45ffba673b,30.04.2025 11:38:14,30.04.2025 11:38:14,31.176.221.57,BA,,1,,,,https://proto.notso.ai/jumbo/chats/a068d62a-439b-4d70-924f-3d45ffba673b.txt,2.051,6470,0.0009,,test
284c6849-51ba-41b8-8afd-c1a70e7bd997,30.04.2025 11:41:48,30.04.2025 11:41:48,31.176.221.57,BA,english,1,happy,no,no,https://proto.notso.ai/jumbo/chats/284c6849-51ba-41b8-8afd-c1a70e7bd997.txt,3.977,6537,0.0010,Greeting,Good day
ef6b43f6-e46f-4d6c-9bf7-3d8f3b658d40,01.05.2025 12:11:18,01.05.2025 12:14:53,31.176.221.57,BA,Dutch,8,excited,no,no,https://proto.notso.ai/jumbo/chats/ef6b43f6-e46f-4d6c-9bf7-3d8f3b658d40.txt,3.458,56027,0.0083,Onboarding,whats up
e5c6d4d1-7a02-4c0e-9d93-214ea06d6764,01.05.2025 12:37:43,01.05.2025 12:37:43,31.176.221.57,BA,turkish,1,happy,no,no,https://proto.notso.ai/jumbo/chats/e5c6d4d1-7a02-4c0e-9d93-214ea06d6764.txt,3.004,6549,0.0010,Language inquiry,Spreek je ook turks?
461086bd-bac0-496b-a541-d76468b96f44,01.05.2025 12:48:13,01.05.2025 12:48:21,31.176.221.57,BA,dutch,2,happy,no,no,https://proto.notso.ai/jumbo/chats/461086bd-bac0-496b-a541-d76468b96f44.txt,2.442,13220,0.0020,General,Lalalaposie
689ae197-2005-4f09-b993-d9f6fa16fc1f,01.05.2025 12:52:07,01.05.2025 12:57:14,31.176.221.57,BA,,3,,,,https://proto.notso.ai/jumbo/chats/689ae197-2005-4f09-b993-d9f6fa16fc1f.txt,1.487,19751,0.0029,,hi liza
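Each sample row appears to follow the 16-column layout that SessionImport stores verbatim. A rough sketch of how one parsed row could map onto the model's fields; the column order is inferred from the sample data, and the rowToSessionImport helper with its naive comma split is an assumption, not the project's actual importer:

// Assumed column order, inferred from the sample rows above:
// externalSessionId, startTimeRaw, endTimeRaw, ipAddress, countryCode, language,
// messagesSent, sentimentRaw, escalatedRaw, forwardedHrRaw, fullTranscriptUrl,
// avgResponseTimeSeconds, tokens, tokensEur, category, initialMessage
function rowToSessionImport(row: string, companyId: string) {
  const cols = row.split(","); // naive split; a real CSV parser would handle quoting
  return {
    companyId,
    externalSessionId: cols[0],
    startTimeRaw: cols[1],
    endTimeRaw: cols[2],
    ipAddress: cols[3] || null,
    countryCode: cols[4] || null,
    language: cols[5] || null,
    messagesSent: cols[6] ? Number(cols[6]) : null,
    sentimentRaw: cols[7] || null,
    escalatedRaw: cols[8] || null,
    forwardedHrRaw: cols[9] || null,
    fullTranscriptUrl: cols[10] || null,
    avgResponseTimeSeconds: cols[11] ? Number(cols[11]) : null,
    tokens: cols[12] ? Number(cols[12]) : null,
    tokensEur: cols[13] ? Number(cols[13]) : null,
    category: cols[14] || null,
    initialMessage: cols[15] || null,
  };
}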
@@ -1,83 +1,182 @@
import { PrismaClient } from "@prisma/client";
import fetch from "node-fetch";

const prisma = new PrismaClient();

/**
 * Fetches transcript content from a URL with optional authentication
 */
async function fetchTranscriptContent(
  url: string,
  username?: string,
  password?: string
): Promise<string | null> {
  try {
    const authHeader =
      username && password
        ? "Basic " + Buffer.from(`${username}:${password}`).toString("base64")
        : undefined;

    const response = await fetch(url, {
      headers: authHeader ? { Authorization: authHeader } : {},
    });

    if (!response.ok) {
      console.warn(`Failed to fetch transcript from ${url}: ${response.statusText}`);
      return null;
    }

    return await response.text();
  } catch (error) {
    console.warn(`Error fetching transcript from ${url}:`, error);
    return null;
  }
}

/**
 * Parse transcript content into individual messages
 */
function parseTranscriptToMessages(transcriptContent: string): Array<{
  timestamp: Date | null;
  role: string;
  content: string;
  order: number;
}> {
  const lines = transcriptContent.split('\n').filter(line => line.trim());
  const messages: Array<{
    timestamp: Date | null;
    role: string;
    content: string;
    order: number;
  }> = [];

  let order = 0;

  for (const line of lines) {
    // Try to parse lines in format: [timestamp] role: content
    const match = line.match(/^\[([^\]]+)\]\s*([^:]+):\s*(.+)$/);

    if (match) {
      const [, timestampStr, role, content] = match;

      // Try to parse the timestamp
      let timestamp: Date | null = null;
      try {
        timestamp = new Date(timestampStr);
        if (isNaN(timestamp.getTime())) {
          timestamp = null;
        }
      } catch {
        timestamp = null;
      }

      messages.push({
        timestamp,
        role: role.trim(),
        content: content.trim(),
        order: order++,
      });
    } else {
      // If line doesn't match expected format, treat as content continuation
      if (messages.length > 0) {
        messages[messages.length - 1].content += '\n' + line;
      } else {
        // First line doesn't match format, create a generic message
        messages.push({
          timestamp: null,
          role: 'unknown',
          content: line,
          order: order++,
        });
      }
    }
  }

  return messages;
}

/**
 * Main function to fetch transcripts for sessions that don't have messages yet
 */
async function fetchTranscriptsForSessions() {
  console.log("Starting to fetch transcripts for sessions without messages...");

  // Find sessions that have transcript URLs but no messages
  const sessionsNeedingTranscripts = await prisma.session.findMany({
    where: {
      AND: [
        { fullTranscriptUrl: { not: null } },
        { messages: { none: {} } }, // No messages yet
      ],
    },
    include: {
      company: true,
      messages: true,
    },
  });

  if (sessionsNeedingTranscripts.length === 0) {
    console.log("No sessions found that need transcript fetching.");
    return;
  }

  console.log(`Found ${sessionsNeedingTranscripts.length} sessions that need transcript fetching.`);
  let successCount = 0;
  let errorCount = 0;

  for (const session of sessionsNeedingTranscripts) {
    if (!session.fullTranscriptUrl) {
      console.warn(`Session ${session.id} has no transcript URL, skipping.`);
      continue;
    }

    console.log(`Fetching transcript for session ${session.id}...`);

    try {
      // Fetch transcript content
      const transcriptContent = await fetchTranscriptContent(
        session.fullTranscriptUrl,
        session.company.csvUsername || undefined,
        session.company.csvPassword || undefined
      );

      if (!transcriptContent) {
        throw new Error("Failed to fetch transcript content");
      }

      // Parse transcript into messages
      const messages = parseTranscriptToMessages(transcriptContent);

      if (messages.length === 0) {
        throw new Error("No messages found in transcript");
      }

      // Create messages in database
      await prisma.message.createMany({
        data: messages.map(msg => ({
          sessionId: session.id,
          timestamp: msg.timestamp,
          role: msg.role,
          content: msg.content,
          order: msg.order,
        })),
      });

      console.log(`Successfully fetched transcript for session ${session.id} (${messages.length} messages)`);
      successCount++;
    } catch (error) {
      console.error(`Error fetching transcript for session ${session.id}:`, error);
      errorCount++;
    }
  }

  console.log("Transcript fetching complete.");
  console.log(`Successfully fetched: ${successCount} transcripts.`);
  console.log(`Failed to fetch: ${errorCount} transcripts.`);
}

// Run the main function
fetchTranscriptsForSessions()
  .catch((e) => {
    console.error("An error occurred during the script execution:", e);
    process.exitCode = 1;
  })
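The parser above matches one message per line in the form `[timestamp] role: content`, and appends any unmatched line to the previous message. A small illustrative input, with invented timestamps and text, showing what parseTranscriptToMessages would produce for it:

// Illustrative transcript in the format the parser's regex matches.
const sampleTranscript = [
  "[2025-05-01T12:11:18Z] User: whats up",
  "[2025-05-01T12:11:20Z] Assistant: Hi! How can I help you today?",
  "a continuation line without a [timestamp] prefix is appended to the previous message",
].join("\n");

// parseTranscriptToMessages(sampleTranscript) yields two messages:
//   { order: 0, role: "User",      content: "whats up" }
//   { order: 1, role: "Assistant", content: "Hi! How can I help you today?\na continuation line ..." }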
@@ -1,68 +0,0 @@
// Fix Trailing Whitespace
// This script removes trailing whitespace from specified file types

import fs from "fs";
import path from "path";
import { fileURLToPath } from "url";

const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);

// Configure which file types to process
const fileTypes = [".ts", ".tsx", ".js", ".jsx", ".json", ".md", ".css"];

// Configure directories to ignore
const ignoreDirs = ["node_modules", ".next", ".git", "out", "build", "dist"];

// Recursively process directories
async function processDirectory(dir) {
  try {
    const files = await fs.promises.readdir(dir, { withFileTypes: true });

    for (const file of files) {
      const fullPath = path.join(dir, file.name);

      // Skip ignored directories
      if (file.isDirectory()) {
        if (!ignoreDirs.includes(file.name)) {
          await processDirectory(fullPath);
        }
        continue;
      }

      // Process only files with matching extensions
      const ext = path.extname(file.name);
      if (!fileTypes.includes(ext)) {
        continue;
      }

      try {
        // Read and process the file
        const content = await fs.promises.readFile(fullPath, "utf8");

        // Remove trailing whitespace from each line
        const processedContent = content
          .split("\n")
          .map((line) => line.replace(/\s+$/, ""))
          .join("\n");

        // Only write if changes were made
        if (processedContent !== content) {
          await fs.promises.writeFile(fullPath, processedContent, "utf8");
          console.log(`Fixed trailing whitespace in ${fullPath}`);
        }
      } catch (fileError) {
        console.error(`Error processing file ${fullPath}:`, fileError);
      }
    }
  } catch (dirError) {
    console.error(`Error reading directory ${dir}:`, dirError);
  }
}

// Start processing from root directory
const rootDir = process.cwd();
console.log(`Starting whitespace cleanup from ${rootDir}`);
processDirectory(rootDir)
  .then(() => console.log("Whitespace cleanup completed"))
  .catch((err) => console.error("Error in whitespace cleanup:", err));
326 scripts/process_sessions.ts (new file)
@@ -0,0 +1,326 @@
import { PrismaClient } from "@prisma/client";
import fetch from "node-fetch";

const prisma = new PrismaClient();
const OPENAI_API_KEY = process.env.OPENAI_API_KEY;
const OPENAI_API_URL = "https://api.openai.com/v1/chat/completions";

// Define the expected response structure from OpenAI
interface OpenAIProcessedData {
  language: string;
  messages_sent: number;
  sentiment: "positive" | "neutral" | "negative";
  escalated: boolean;
  forwarded_hr: boolean;
  category: string;
  questions: string[];
  summary: string;
  session_id: string;
}

/**
 * Fetches transcript content from a URL
 */
async function fetchTranscriptContent(
  url: string,
  username?: string,
  password?: string
): Promise<string | null> {
  try {
    const authHeader =
      username && password
        ? "Basic " + Buffer.from(`${username}:${password}`).toString("base64")
        : undefined;

    const response = await fetch(url, {
      headers: authHeader ? { Authorization: authHeader } : {},
    });

    if (!response.ok) {
      console.warn(`Failed to fetch transcript from ${url}: ${response.statusText}`);
      return null;
    }
    return await response.text();
  } catch (error) {
    console.warn(`Error fetching transcript from ${url}:`, error);
    return null;
  }
}

/**
 * Processes a session transcript using OpenAI API
 */
async function processTranscriptWithOpenAI(
  sessionId: string,
  transcript: string
): Promise<OpenAIProcessedData> {
  if (!OPENAI_API_KEY) {
    throw new Error("OPENAI_API_KEY environment variable is not set");
  }

  const systemMessage = `
You are an AI assistant tasked with analyzing chat transcripts.
Extract the following information from the transcript:
1. The primary language used by the user (ISO 639-1 code)
2. Number of messages sent by the user
3. Overall sentiment (positive, neutral, or negative)
4. Whether the conversation was escalated
5. Whether HR contact was mentioned or provided
6. The best-fitting category for the conversation from this list:
   - Schedule & Hours
   - Leave & Vacation
   - Sick Leave & Recovery
   - Salary & Compensation
   - Contract & Hours
   - Onboarding
   - Offboarding
   - Workwear & Staff Pass
   - Team & Contacts
   - Personal Questions
   - Access & Login
   - Social questions
   - Unrecognized / Other
7. Up to 5 paraphrased questions asked by the user (in English)
8. A brief summary of the conversation (10-300 characters)

Return the data in JSON format matching this schema:
{
  "language": "ISO 639-1 code",
  "messages_sent": number,
  "sentiment": "positive|neutral|negative",
  "escalated": boolean,
  "forwarded_hr": boolean,
  "category": "one of the categories listed above",
  "questions": ["question 1", "question 2", ...],
  "summary": "brief summary",
  "session_id": "${sessionId}"
}
`;

  try {
    const response = await fetch(OPENAI_API_URL, {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        Authorization: `Bearer ${OPENAI_API_KEY}`,
      },
      body: JSON.stringify({
        model: "gpt-4-turbo",
        messages: [
          {
            role: "system",
            content: systemMessage,
          },
          {
            role: "user",
            content: transcript,
          },
        ],
        temperature: 0.3,
        response_format: { type: "json_object" },
      }),
    });

    if (!response.ok) {
      const errorText = await response.text();
      throw new Error(`OpenAI API error: ${response.status} - ${errorText}`);
    }

    const data = (await response.json()) as any;
    const processedData = JSON.parse(data.choices[0].message.content);

    validateOpenAIResponse(processedData);
    return processedData;
  } catch (error) {
    console.error(`Error processing transcript with OpenAI:`, error);
    throw error;
  }
}

/**
 * Validates the OpenAI response against our expected schema
 */
function validateOpenAIResponse(data: any): asserts data is OpenAIProcessedData {
  const requiredFields = [
    "language", "messages_sent", "sentiment", "escalated",
    "forwarded_hr", "category", "questions", "summary", "session_id"
  ];

  for (const field of requiredFields) {
    if (!(field in data)) {
      throw new Error(`Missing required field: ${field}`);
    }
  }

  if (typeof data.language !== "string" || !/^[a-z]{2}$/.test(data.language)) {
    throw new Error("Invalid language format. Expected ISO 639-1 code (e.g., 'en')");
  }

  if (typeof data.messages_sent !== "number" || data.messages_sent < 0) {
    throw new Error("Invalid messages_sent. Expected non-negative number");
  }

  if (!["positive", "neutral", "negative"].includes(data.sentiment)) {
    throw new Error("Invalid sentiment. Expected 'positive', 'neutral', or 'negative'");
  }

  if (typeof data.escalated !== "boolean") {
    throw new Error("Invalid escalated. Expected boolean");
  }

  if (typeof data.forwarded_hr !== "boolean") {
    throw new Error("Invalid forwarded_hr. Expected boolean");
  }

  const validCategories = [
    "Schedule & Hours", "Leave & Vacation", "Sick Leave & Recovery",
    "Salary & Compensation", "Contract & Hours", "Onboarding", "Offboarding",
    "Workwear & Staff Pass", "Team & Contacts", "Personal Questions",
    "Access & Login", "Social questions", "Unrecognized / Other"
  ];

  if (!validCategories.includes(data.category)) {
    throw new Error(`Invalid category. Expected one of: ${validCategories.join(", ")}`);
  }

  if (!Array.isArray(data.questions)) {
    throw new Error("Invalid questions. Expected array of strings");
  }

  if (typeof data.summary !== "string" || data.summary.length < 10 || data.summary.length > 300) {
    throw new Error("Invalid summary. Expected string between 10-300 characters");
  }

  if (typeof data.session_id !== "string") {
    throw new Error("Invalid session_id. Expected string");
  }
}

/**
 * Main function to process SessionImport records that need processing
 */
async function processUnprocessedSessions() {
  console.log("Starting to process unprocessed SessionImport records...");

  // Find SessionImport records that have transcript URLs and haven't been processed yet
  const importsToProcess = await prisma.sessionImport.findMany({
    where: {
      fullTranscriptUrl: { not: null },
      // Add any other conditions to identify unprocessed records
    },
    include: {
      company: true,
    },
  });

  if (importsToProcess.length === 0) {
    console.log("No SessionImport records found requiring processing.");
    return;
  }

  console.log(`Found ${importsToProcess.length} SessionImport records to process.`);
  let successCount = 0;
  let errorCount = 0;

  for (const importRecord of importsToProcess) {
    if (!importRecord.fullTranscriptUrl) {
      console.warn(`SessionImport ${importRecord.id} has no transcript URL, skipping.`);
      continue;
    }

    console.log(`Processing transcript for SessionImport ${importRecord.id}...`);

    try {
      // Mark as processing (status field doesn't exist in new schema)
      console.log(`Processing SessionImport ${importRecord.id}...`);

      // Fetch transcript content
      const transcriptContent = await fetchTranscriptContent(
        importRecord.fullTranscriptUrl,
        importRecord.company.csvUsername || undefined,
        importRecord.company.csvPassword || undefined
      );

      if (!transcriptContent) {
        throw new Error("Failed to fetch transcript content");
      }

      // Process with OpenAI
      const processedData = await processTranscriptWithOpenAI(
        importRecord.externalSessionId,
        transcriptContent
      );

      // Parse dates from raw strings
      const startTime = new Date(importRecord.startTimeRaw);
      const endTime = new Date(importRecord.endTimeRaw);

      // Create or update Session record
      const session = await prisma.session.upsert({
        where: { importId: importRecord.id },
        update: {
          startTime: isNaN(startTime.getTime()) ? new Date() : startTime,
          endTime: isNaN(endTime.getTime()) ? new Date() : endTime,
          ipAddress: importRecord.ipAddress,
          country: importRecord.countryCode,
          language: processedData.language,
          messagesSent: processedData.messages_sent,
          sentiment: processedData.sentiment.toUpperCase() as "POSITIVE" | "NEUTRAL" | "NEGATIVE",
          escalated: processedData.escalated,
          forwardedHr: processedData.forwarded_hr,
          fullTranscriptUrl: importRecord.fullTranscriptUrl,
          avgResponseTime: importRecord.avgResponseTimeSeconds,
          // Note: tokens, tokensEur, processed, questions fields don't exist in new schema
          // category: processedData.category, // Category field needs enum mapping
          initialMsg: importRecord.initialMessage,
          summary: processedData.summary,
        },
        create: {
          companyId: importRecord.companyId,
          importId: importRecord.id,
          startTime: isNaN(startTime.getTime()) ? new Date() : startTime,
          endTime: isNaN(endTime.getTime()) ? new Date() : endTime,
          ipAddress: importRecord.ipAddress,
          country: importRecord.countryCode,
          language: processedData.language,
          messagesSent: processedData.messages_sent,
          sentiment: processedData.sentiment.toUpperCase() as "POSITIVE" | "NEUTRAL" | "NEGATIVE",
          escalated: processedData.escalated,
          forwardedHr: processedData.forwarded_hr,
          fullTranscriptUrl: importRecord.fullTranscriptUrl,
          avgResponseTime: importRecord.avgResponseTimeSeconds,
          // Note: tokens, tokensEur, processed, questions, category fields don't exist in new schema
          initialMsg: importRecord.initialMessage,
          summary: processedData.summary,
        },
      });

      // Mark SessionImport as processed (processedAt field doesn't exist in new schema)
      console.log(`Successfully processed SessionImport ${importRecord.id} -> Session ${session.id}`);
      successCount++;
    } catch (error) {
      console.error(`Error processing SessionImport ${importRecord.id}:`, error);

      // Log error (status and errorMsg fields don't exist in new schema)
      console.error(`Failed to process SessionImport ${importRecord.id}: ${error instanceof Error ? error.message : String(error)}`);

      errorCount++;
    }
  }

  console.log("SessionImport processing complete.");
  console.log(`Successfully processed: ${successCount} records.`);
  console.log(`Failed to process: ${errorCount} records.`);
}

// Run the main function
processUnprocessedSessions()
  .catch((e) => {
    console.error("An error occurred during the script execution:", e);
    process.exitCode = 1;
  })
  .finally(async () => {
    await prisma.$disconnect();
  });
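For reference, an object that would pass every check in validateOpenAIResponse end to end. The values are invented; only the shape and constraints (two-letter language code, category from the fixed list, 10-300 character summary) come from the script above:

// Invented example payload that satisfies validateOpenAIResponse.
const exampleProcessedData = {
  language: "nl",                 // ISO 639-1 code
  messages_sent: 8,
  sentiment: "positive" as const, // "positive" | "neutral" | "negative"
  escalated: false,
  forwarded_hr: false,
  category: "Onboarding",         // must be one of the listed categories
  questions: ["How do I get my staff pass?", "When does my first shift start?"],
  summary: "New employee asked about onboarding steps and received a checklist.",
  session_id: "ef6b43f6-e46f-4d6c-9bf7-3d8f3b658d40",
};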
Some files were not shown because too many files have changed in this diff.