mirror of
https://github.com/kjanat/livedash-node.git
synced 2026-01-16 11:12:11 +01:00
Compare commits
2 Commits
developmen
...
71c8aff125
| Author | SHA1 | Date | |
|---|---|---|---|
|
71c8aff125
|
|||
|
0c18e8be57
|
@ -1 +0,0 @@
|
||||
Use pnpm to manage this project, not npm!
|
||||
9
.env.development
Normal file
9
.env.development
Normal file
@ -0,0 +1,9 @@
|
||||
# Development environment settings
|
||||
# This file ensures NextAuth always has necessary environment variables in development
|
||||
|
||||
# NextAuth.js configuration
|
||||
NEXTAUTH_URL=http://192.168.1.2:3000
|
||||
NEXTAUTH_SECRET=this_is_a_fixed_secret_for_development_only
|
||||
NODE_ENV=development
|
||||
|
||||
# Database connection - already configured in your prisma/schema.prisma
|
||||
26
.env.example
26
.env.example
@ -1,26 +0,0 @@
|
||||
# Development environment settings
|
||||
# This file ensures NextAuth always has necessary environment variables in development
|
||||
|
||||
# NextAuth.js configuration
|
||||
NEXTAUTH_URL="http://localhost:3000"
|
||||
NEXTAUTH_SECRET="this_is_a_fixed_secret_for_development_only"
|
||||
NODE_ENV="development"
|
||||
|
||||
# OpenAI API key for session processing
|
||||
# Add your API key here: OPENAI_API_KEY=sk-...
|
||||
OPENAI_API_KEY="your_openai_api_key_here"
|
||||
|
||||
# Database connection - already configured in your prisma/schema.prisma
|
||||
|
||||
# Scheduler Configuration
|
||||
SCHEDULER_ENABLED="false" # Enable/disable all schedulers (false for dev, true for production)
|
||||
CSV_IMPORT_INTERVAL="*/15 * * * *" # Cron expression for CSV imports (every 15 minutes)
|
||||
IMPORT_PROCESSING_INTERVAL="*/5 * * * *" # Cron expression for processing imports to sessions (every 5 minutes)
|
||||
IMPORT_PROCESSING_BATCH_SIZE="50" # Number of imports to process at once
|
||||
SESSION_PROCESSING_INTERVAL="0 * * * *" # Cron expression for AI session processing (every hour)
|
||||
SESSION_PROCESSING_BATCH_SIZE="0" # 0 = unlimited sessions, >0 = specific limit
|
||||
SESSION_PROCESSING_CONCURRENCY="5" # How many sessions to process in parallel
|
||||
|
||||
# Postgres Database Configuration
|
||||
DATABASE_URL_TEST="postgresql://"
|
||||
DATABASE_URL="postgresql://"
|
||||
@ -1,29 +0,0 @@
|
||||
# Copy this file to .env.local and configure as needed
|
||||
|
||||
# NextAuth.js configuration
|
||||
NEXTAUTH_URL="http://localhost:3000"
|
||||
NEXTAUTH_SECRET="your_secret_key_here"
|
||||
NODE_ENV="development"
|
||||
|
||||
# OpenAI API key for session processing
|
||||
OPENAI_API_KEY="your_openai_api_key_here"
|
||||
|
||||
# Scheduler Configuration
|
||||
SCHEDULER_ENABLED="true" # Set to false to disable all schedulers during development
|
||||
CSV_IMPORT_INTERVAL="*/15 * * * *" # Every 15 minutes (cron format)
|
||||
IMPORT_PROCESSING_INTERVAL="*/5 * * * *" # Every 5 minutes (cron format) - converts imports to sessions
|
||||
IMPORT_PROCESSING_BATCH_SIZE="50" # Number of imports to process at once
|
||||
SESSION_PROCESSING_INTERVAL="0 * * * *" # Every hour (cron format) - AI processing
|
||||
SESSION_PROCESSING_BATCH_SIZE="0" # 0 = process all sessions, >0 = limit number
|
||||
SESSION_PROCESSING_CONCURRENCY="5" # Number of sessions to process in parallel
|
||||
|
||||
# Postgres Database Configuration
|
||||
DATABASE_URL_TEST="postgresql://"
|
||||
DATABASE_URL="postgresql://"
|
||||
|
||||
# Example configurations:
|
||||
# - For development (no schedulers): SCHEDULER_ENABLED=false
|
||||
# - For testing (every 5 minutes): CSV_IMPORT_INTERVAL=*/5 * * * *
|
||||
# - For faster import processing: IMPORT_PROCESSING_INTERVAL=*/2 * * * *
|
||||
# - For limited processing: SESSION_PROCESSING_BATCH_SIZE=10
|
||||
# - For high concurrency: SESSION_PROCESSING_CONCURRENCY=10
|
||||
188
.gitignore
vendored
188
.gitignore
vendored
@ -261,7 +261,189 @@ Thumbs.db
|
||||
/playwright-report/
|
||||
/blob-report/
|
||||
/playwright/.cache/
|
||||
# Created by https://www.toptal.com/developers/gitignore/api/node,macos
|
||||
# Edit at https://www.toptal.com/developers/gitignore?templates=node,macos
|
||||
|
||||
# OpenAI API request samples
|
||||
sample-openai-request.json
|
||||
admin-user.txt
|
||||
### macOS ###
|
||||
# General
|
||||
.DS_Store
|
||||
.AppleDouble
|
||||
.LSOverride
|
||||
|
||||
# Icon must end with two \r
|
||||
Icon
|
||||
|
||||
# Thumbnails
|
||||
._*
|
||||
|
||||
# Files that might appear in the root of a volume
|
||||
.DocumentRevisions-V100
|
||||
.fseventsd
|
||||
.Spotlight-V100
|
||||
.TemporaryItems
|
||||
.Trashes
|
||||
.VolumeIcon.icns
|
||||
.com.apple.timemachine.donotpresent
|
||||
|
||||
# Directories potentially created on remote AFP share
|
||||
.AppleDB
|
||||
.AppleDesktop
|
||||
Network Trash Folder
|
||||
Temporary Items
|
||||
.apdisk
|
||||
|
||||
### Node ###
|
||||
# Logs
|
||||
logs
|
||||
*.log
|
||||
npm-debug.log*
|
||||
yarn-debug.log*
|
||||
yarn-error.log*
|
||||
lerna-debug.log*
|
||||
.pnpm-debug.log*
|
||||
|
||||
# Diagnostic reports (https://nodejs.org/api/report.html)
|
||||
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
|
||||
|
||||
# Runtime data
|
||||
pids
|
||||
*.pid
|
||||
*.seed
|
||||
*.pid.lock
|
||||
|
||||
# Directory for instrumented libs generated by jscoverage/JSCover
|
||||
lib-cov
|
||||
|
||||
# Coverage directory used by tools like istanbul
|
||||
coverage
|
||||
*.lcov
|
||||
|
||||
# nyc test coverage
|
||||
.nyc_output
|
||||
|
||||
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
|
||||
.grunt
|
||||
|
||||
# Bower dependency directory (https://bower.io/)
|
||||
bower_components
|
||||
|
||||
# node-waf configuration
|
||||
.lock-wscript
|
||||
|
||||
# Compiled binary addons (https://nodejs.org/api/addons.html)
|
||||
build/Release
|
||||
|
||||
# Dependency directories
|
||||
node_modules/
|
||||
jspm_packages/
|
||||
|
||||
# Moved from ./templates for ignoring all locks in templates
|
||||
templates/**/*-lock.*
|
||||
templates/**/*.lock
|
||||
|
||||
# Snowpack dependency directory (https://snowpack.dev/)
|
||||
web_modules/
|
||||
|
||||
# TypeScript cache
|
||||
*.tsbuildinfo
|
||||
|
||||
# Optional npm cache directory
|
||||
.npm
|
||||
|
||||
# Optional eslint cache
|
||||
.eslintcache
|
||||
|
||||
# Optional stylelint cache
|
||||
.stylelintcache
|
||||
|
||||
# Microbundle cache
|
||||
.rpt2_cache/
|
||||
.rts2_cache_cjs/
|
||||
.rts2_cache_es/
|
||||
.rts2_cache_umd/
|
||||
|
||||
# Optional REPL history
|
||||
.node_repl_history
|
||||
|
||||
# Output of 'npm pack'
|
||||
*.tgz
|
||||
|
||||
# Yarn Integrity file
|
||||
.yarn-integrity
|
||||
|
||||
# dotenv environment variable files
|
||||
.env
|
||||
.env.development.local
|
||||
.env.test.local
|
||||
.env.production.local
|
||||
.env.local
|
||||
|
||||
# parcel-bundler cache (https://parceljs.org/)
|
||||
.cache
|
||||
.parcel-cache
|
||||
|
||||
# Next.js build output
|
||||
.next
|
||||
out
|
||||
|
||||
# Nuxt.js build / generate output
|
||||
.nuxt
|
||||
dist
|
||||
|
||||
# Gatsby files
|
||||
.cache/
|
||||
# Comment in the public line in if your project uses Gatsby and not Next.js
|
||||
# https://nextjs.org/blog/next-9-1#public-directory-support
|
||||
# public
|
||||
|
||||
# vuepress build output
|
||||
.vuepress/dist
|
||||
|
||||
# vuepress v2.x temp and cache directory
|
||||
.temp
|
||||
|
||||
# Docusaurus cache and generated files
|
||||
.docusaurus
|
||||
|
||||
# Serverless directories
|
||||
.serverless/
|
||||
|
||||
# FuseBox cache
|
||||
.fusebox/
|
||||
|
||||
# DynamoDB Local files
|
||||
.dynamodb/
|
||||
|
||||
# TernJS port file
|
||||
.tern-port
|
||||
|
||||
# Stores VSCode versions used for testing VSCode extensions
|
||||
.vscode-test
|
||||
|
||||
# yarn v2
|
||||
.yarn/cache
|
||||
.yarn/unplugged
|
||||
.yarn/build-state.yml
|
||||
.yarn/install-state.gz
|
||||
.pnp.*
|
||||
|
||||
### Node Patch ###
|
||||
# Serverless Webpack directories
|
||||
.webpack/
|
||||
|
||||
# Optional stylelint cache
|
||||
|
||||
# SvelteKit build / generate output
|
||||
.svelte-kit
|
||||
|
||||
# End of https://www.toptal.com/developers/gitignore/api/node,macos
|
||||
|
||||
# Wrangler output
|
||||
.wrangler/
|
||||
build/
|
||||
|
||||
# Turbo output
|
||||
.turbo/
|
||||
|
||||
.dev.vars*
|
||||
test-transcript-format.js
|
||||
|
||||
54
.prettierignore
Normal file
54
.prettierignore
Normal file
@ -0,0 +1,54 @@
|
||||
# Dependencies
|
||||
node_modules/
|
||||
.pnpm-store/
|
||||
|
||||
# Build outputs
|
||||
.next/
|
||||
out/
|
||||
dist/
|
||||
build/
|
||||
|
||||
# Environment files
|
||||
.env
|
||||
.env.local
|
||||
.env.development.local
|
||||
.env.test.local
|
||||
.env.production.local
|
||||
|
||||
# Logs
|
||||
*.log
|
||||
npm-debug.log*
|
||||
yarn-debug.log*
|
||||
yarn-error.log*
|
||||
pnpm-debug.log*
|
||||
|
||||
# Database
|
||||
*.db
|
||||
*.sqlite
|
||||
prisma/migrations/
|
||||
|
||||
# IDE
|
||||
.vscode/
|
||||
.idea/
|
||||
*.swp
|
||||
*.swo
|
||||
|
||||
# OS
|
||||
.DS_Store
|
||||
Thumbs.db
|
||||
|
||||
# Git
|
||||
.git/
|
||||
|
||||
# Coverage reports
|
||||
coverage/
|
||||
|
||||
# Playwright
|
||||
test-results/
|
||||
playwright-report/
|
||||
playwright/.cache/
|
||||
|
||||
# Generated files
|
||||
*.generated.*
|
||||
|
||||
pnpm-lock.yaml
|
||||
106
README.md
106
README.md
@ -2,65 +2,65 @@
|
||||
|
||||
A real-time analytics dashboard for monitoring user sessions and interactions with interactive data visualizations and detailed metrics.
|
||||
|
||||
.*%22&replace=%24%3Cversion%3E&logo=nextdotjs&label=Nextjs&color=%23000000)
|
||||
.*%22&replace=%24%3Cversion%3E&logo=react&label=React&color=%2361DAFB)
|
||||
.*%22&replace=%24%3Cversion%3E&logo=typescript&label=TypeScript&color=%233178C6)
|
||||
.*%22&replace=%24%3Cversion%3E&logo=prisma&label=Prisma&color=%232D3748)
|
||||
.*%22&replace=%24%3Cversion%3E&logo=tailwindcss&label=TailwindCSS&color=%2306B6D4)
|
||||
.*%22&replace=%24%3Cversion%3E&logo=nextdotjs&label=Nextjs&color=%23000000>)
|
||||
.*%22&replace=%24%3Cversion%3E&logo=react&label=React&color=%2361DAFB>)
|
||||
.*%22&replace=%24%3Cversion%3E&logo=typescript&label=TypeScript&color=%233178C6>)
|
||||
.*%22&replace=%24%3Cversion%3E&logo=prisma&label=Prisma&color=%232D3748>)
|
||||
.*%22&replace=%24%3Cversion%3E&logo=tailwindcss&label=TailwindCSS&color=%2306B6D4>)
|
||||
|
||||
## Features
|
||||
|
||||
- **Real-time Session Monitoring**: Track and analyze user sessions as they happen
|
||||
- **Interactive Visualizations**: Geographic maps, response time distributions, and more
|
||||
- **Advanced Analytics**: Detailed metrics and insights about user behavior
|
||||
- **User Management**: Secure authentication with role-based access control
|
||||
- **Customizable Dashboard**: Filter and sort data based on your specific needs
|
||||
- **Session Details**: In-depth analysis of individual user sessions
|
||||
- **Real-time Session Monitoring**: Track and analyze user sessions as they happen
|
||||
- **Interactive Visualizations**: Geographic maps, response time distributions, and more
|
||||
- **Advanced Analytics**: Detailed metrics and insights about user behavior
|
||||
- **User Management**: Secure authentication with role-based access control
|
||||
- **Customizable Dashboard**: Filter and sort data based on your specific needs
|
||||
- **Session Details**: In-depth analysis of individual user sessions
|
||||
|
||||
## Tech Stack
|
||||
|
||||
- **Frontend**: React 19, Next.js 15, TailwindCSS 4
|
||||
- **Backend**: Next.js API Routes, Node.js
|
||||
- **Database**: Prisma ORM with SQLite (default), compatible with PostgreSQL
|
||||
- **Authentication**: NextAuth.js
|
||||
- **Visualization**: Chart.js, D3.js, React Leaflet
|
||||
- **Data Processing**: Node-cron for scheduled tasks
|
||||
- **Frontend**: React 19, Next.js 15, TailwindCSS 4
|
||||
- **Backend**: Next.js API Routes, Node.js
|
||||
- **Database**: Prisma ORM with SQLite (default), compatible with PostgreSQL
|
||||
- **Authentication**: NextAuth.js
|
||||
- **Visualization**: Chart.js, D3.js, React Leaflet
|
||||
- **Data Processing**: Node-cron for scheduled tasks
|
||||
|
||||
## Getting Started
|
||||
|
||||
### Prerequisites
|
||||
|
||||
- Node.js (LTS version recommended)
|
||||
- npm or yarn
|
||||
- Node.js (LTS version recommended)
|
||||
- npm or yarn
|
||||
|
||||
### Installation
|
||||
|
||||
1. Clone this repository:
|
||||
|
||||
```bash
|
||||
git clone https://github.com/kjanat/livedash-node.git
|
||||
cd livedash-node
|
||||
```
|
||||
```bash
|
||||
git clone https://github.com/kjanat/livedash-node.git
|
||||
cd livedash-node
|
||||
```
|
||||
|
||||
2. Install dependencies:
|
||||
|
||||
```bash
|
||||
npm install
|
||||
```
|
||||
```bash
|
||||
npm install
|
||||
```
|
||||
|
||||
3. Set up the database:
|
||||
|
||||
```bash
|
||||
npm run prisma:generate
|
||||
npm run prisma:migrate
|
||||
npm run prisma:seed
|
||||
```
|
||||
```bash
|
||||
npm run prisma:generate
|
||||
npm run prisma:migrate
|
||||
npm run prisma:seed
|
||||
```
|
||||
|
||||
4. Start the development server:
|
||||
|
||||
```bash
|
||||
npm run dev
|
||||
```
|
||||
```bash
|
||||
npm run dev
|
||||
```
|
||||
|
||||
5. Open your browser and navigate to <http://localhost:3000>
|
||||
|
||||
@ -76,22 +76,22 @@ NEXTAUTH_SECRET=your-secret-here
|
||||
|
||||
## Project Structure
|
||||
|
||||
- `app/`: Next.js App Router components and pages
|
||||
- `components/`: Reusable React components
|
||||
- `lib/`: Utility functions and shared code
|
||||
- `pages/`: API routes and server-side code
|
||||
- `prisma/`: Database schema and migrations
|
||||
- `public/`: Static assets
|
||||
- `docs/`: Project documentation
|
||||
- `app/`: Next.js App Router components and pages
|
||||
- `components/`: Reusable React components
|
||||
- `lib/`: Utility functions and shared code
|
||||
- `pages/`: API routes and server-side code
|
||||
- `prisma/`: Database schema and migrations
|
||||
- `public/`: Static assets
|
||||
- `docs/`: Project documentation
|
||||
|
||||
## Available Scripts
|
||||
|
||||
- `npm run dev`: Start the development server
|
||||
- `npm run build`: Build the application for production
|
||||
- `npm run start`: Run the production build
|
||||
- `npm run lint`: Run ESLint
|
||||
- `npm run format`: Format code with Prettier
|
||||
- `npm run prisma:studio`: Open Prisma Studio to view database
|
||||
- `npm run dev`: Start the development server
|
||||
- `npm run build`: Build the application for production
|
||||
- `npm run start`: Run the production build
|
||||
- `npm run lint`: Run ESLint
|
||||
- `npm run format`: Format code with Prettier
|
||||
- `npm run prisma:studio`: Open Prisma Studio to view database
|
||||
|
||||
## Contributing
|
||||
|
||||
@ -107,9 +107,9 @@ This project is not licensed for commercial use without explicit permission. Fre
|
||||
|
||||
## Acknowledgments
|
||||
|
||||
- [Next.js](https://nextjs.org/)
|
||||
- [Prisma](https://prisma.io/)
|
||||
- [TailwindCSS](https://tailwindcss.com/)
|
||||
- [Chart.js](https://www.chartjs.org/)
|
||||
- [D3.js](https://d3js.org/)
|
||||
- [React Leaflet](https://react-leaflet.js.org/)
|
||||
- [Next.js](https://nextjs.org/)
|
||||
- [Prisma](https://prisma.io/)
|
||||
- [TailwindCSS](https://tailwindcss.com/)
|
||||
- [Chart.js](https://www.chartjs.org/)
|
||||
- [D3.js](https://d3js.org/)
|
||||
- [React Leaflet](https://react-leaflet.js.org/)
|
||||
|
||||
108
TODO.md
Normal file
108
TODO.md
Normal file
@ -0,0 +1,108 @@
|
||||
# TODO.md
|
||||
|
||||
## Dashboard Integration
|
||||
|
||||
- [ ] **Resolve `GeographicMap.tsx` and `ResponseTimeDistribution.tsx` data simulation**
|
||||
- Investigate integrating real data sources with server-side analytics
|
||||
- Replace simulated data mentioned in `docs/dashboard-components.md`
|
||||
|
||||
## Component Specific
|
||||
|
||||
- [ ] **Implement robust emailing of temporary passwords**
|
||||
|
||||
- File: `pages/api/dashboard/users.ts`
|
||||
- Set up proper email service integration
|
||||
|
||||
- [x] **Session page improvements** ✅
|
||||
- File: `app/dashboard/sessions/page.tsx`
|
||||
- Implemented pagination, advanced filtering, and sorting
|
||||
|
||||
## File Cleanup
|
||||
|
||||
- [x] **Remove backup files** ✅
|
||||
- Reviewed and removed `.bak` and `.new` files after integration
|
||||
- Cleaned up `GeographicMap.tsx.bak`, `SessionDetails.tsx.bak`, `SessionDetails.tsx.new`
|
||||
|
||||
## Database Schema Improvements
|
||||
|
||||
- [ ] **Update EndTime field**
|
||||
|
||||
- Make `endTime` field nullable in Prisma schema to match TypeScript interfaces
|
||||
|
||||
- [ ] **Add database indices**
|
||||
|
||||
- Add appropriate indices to improve query performance
|
||||
- Focus on dashboard metrics and session listing queries
|
||||
|
||||
- [ ] **Implement production email service**
|
||||
- Replace console logging in `lib/sendEmail.ts`
|
||||
- Consider providers: Nodemailer, SendGrid, AWS SES
|
||||
|
||||
## General Enhancements & Features
|
||||
|
||||
- [ ] **Real-time updates**
|
||||
|
||||
- Implement for dashboard and session list
|
||||
- Consider WebSockets or Server-Sent Events
|
||||
|
||||
- [ ] **Data export functionality**
|
||||
|
||||
- Allow users (especially admins) to export session data
|
||||
- Support CSV format initially
|
||||
|
||||
- [ ] **Customizable dashboard**
|
||||
- Allow users to customize dashboard view
|
||||
- Let users choose which metrics/charts are most important
|
||||
|
||||
## Testing & Quality Assurance
|
||||
|
||||
- [ ] **Comprehensive testing suite**
|
||||
|
||||
- [ ] Unit tests for utility functions and API logic
|
||||
- [ ] Integration tests for API endpoints with database
|
||||
- [ ] End-to-end tests for user flows (Playwright or Cypress)
|
||||
|
||||
- [ ] **Error monitoring and logging**
|
||||
|
||||
- Integrate robust error monitoring service (Sentry)
|
||||
- Enhance server-side logging
|
||||
|
||||
- [ ] **Accessibility improvements**
|
||||
- Review application against WCAG guidelines
|
||||
- Improve keyboard navigation and screen reader compatibility
|
||||
- Check color contrast ratios
|
||||
|
||||
## Security Enhancements
|
||||
|
||||
- [x] **Password reset functionality** ✅
|
||||
|
||||
- Implemented secure password reset mechanism
|
||||
- Files: `app/forgot-password/page.tsx`, `app/reset-password/page.tsx`, `pages/api/forgot-password.ts`, `pages/api/reset-password.ts`
|
||||
|
||||
- [ ] **Two-Factor Authentication (2FA)**
|
||||
|
||||
- Consider adding 2FA, especially for admin accounts
|
||||
|
||||
- [ ] **Input validation and sanitization**
|
||||
- Review all user inputs (API request bodies, query parameters)
|
||||
- Ensure proper validation and sanitization
|
||||
|
||||
## Code Quality & Development
|
||||
|
||||
- [ ] **Code review process**
|
||||
|
||||
- Enforce code reviews for all changes
|
||||
|
||||
- [ ] **Environment configuration**
|
||||
|
||||
- Ensure secure management of environment-specific configurations
|
||||
|
||||
- [ ] **Dependency management**
|
||||
|
||||
- Periodically review dependencies for vulnerabilities
|
||||
- Keep dependencies updated
|
||||
|
||||
- [ ] **Documentation updates**
|
||||
- [ ] Ensure `docs/dashboard-components.md` reflects actual implementations
|
||||
- [ ] Verify "Dashboard Enhancements" are consistently applied
|
||||
- [ ] Update documentation for improved layout and visual hierarchies
|
||||
@ -1,136 +0,0 @@
|
||||
import { NextRequest, NextResponse } from "next/server";
|
||||
import { fetchAndParseCsv } from "../../../../lib/csvFetcher";
|
||||
import { processQueuedImports } from "../../../../lib/importProcessor";
|
||||
import { prisma } from "../../../../lib/prisma";
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
try {
|
||||
const body = await request.json();
|
||||
let { companyId } = body;
|
||||
|
||||
if (!companyId) {
|
||||
// Try to get user from prisma based on session cookie
|
||||
try {
|
||||
const session = await prisma.session.findFirst({
|
||||
orderBy: { createdAt: "desc" },
|
||||
where: {
|
||||
/* Add session check criteria here */
|
||||
},
|
||||
});
|
||||
|
||||
if (session) {
|
||||
companyId = session.companyId;
|
||||
}
|
||||
} catch (error) {
|
||||
// Log error for server-side debugging
|
||||
const errorMessage =
|
||||
error instanceof Error ? error.message : String(error);
|
||||
// Use a server-side logging approach instead of console
|
||||
process.stderr.write(`Error fetching session: ${errorMessage}\n`);
|
||||
}
|
||||
}
|
||||
|
||||
if (!companyId) {
|
||||
return NextResponse.json(
|
||||
{ error: "Company ID is required" },
|
||||
{ status: 400 }
|
||||
);
|
||||
}
|
||||
|
||||
const company = await prisma.company.findUnique({ where: { id: companyId } });
|
||||
if (!company) {
|
||||
return NextResponse.json(
|
||||
{ error: "Company not found" },
|
||||
{ status: 404 }
|
||||
);
|
||||
}
|
||||
|
||||
const rawSessionData = await fetchAndParseCsv(
|
||||
company.csvUrl,
|
||||
company.csvUsername as string | undefined,
|
||||
company.csvPassword as string | undefined
|
||||
);
|
||||
|
||||
let importedCount = 0;
|
||||
|
||||
// Create SessionImport records for new data
|
||||
for (const rawSession of rawSessionData) {
|
||||
try {
|
||||
// Use upsert to handle duplicates gracefully
|
||||
await prisma.sessionImport.upsert({
|
||||
where: {
|
||||
companyId_externalSessionId: {
|
||||
companyId: company.id,
|
||||
externalSessionId: rawSession.externalSessionId,
|
||||
},
|
||||
},
|
||||
update: {
|
||||
// Update existing record with latest data
|
||||
startTimeRaw: rawSession.startTimeRaw,
|
||||
endTimeRaw: rawSession.endTimeRaw,
|
||||
ipAddress: rawSession.ipAddress,
|
||||
countryCode: rawSession.countryCode,
|
||||
language: rawSession.language,
|
||||
messagesSent: rawSession.messagesSent,
|
||||
sentimentRaw: rawSession.sentimentRaw,
|
||||
escalatedRaw: rawSession.escalatedRaw,
|
||||
forwardedHrRaw: rawSession.forwardedHrRaw,
|
||||
fullTranscriptUrl: rawSession.fullTranscriptUrl,
|
||||
avgResponseTimeSeconds: rawSession.avgResponseTimeSeconds,
|
||||
tokens: rawSession.tokens,
|
||||
tokensEur: rawSession.tokensEur,
|
||||
category: rawSession.category,
|
||||
initialMessage: rawSession.initialMessage,
|
||||
// Status tracking now handled by ProcessingStatusManager
|
||||
},
|
||||
create: {
|
||||
companyId: company.id,
|
||||
externalSessionId: rawSession.externalSessionId,
|
||||
startTimeRaw: rawSession.startTimeRaw,
|
||||
endTimeRaw: rawSession.endTimeRaw,
|
||||
ipAddress: rawSession.ipAddress,
|
||||
countryCode: rawSession.countryCode,
|
||||
language: rawSession.language,
|
||||
messagesSent: rawSession.messagesSent,
|
||||
sentimentRaw: rawSession.sentimentRaw,
|
||||
escalatedRaw: rawSession.escalatedRaw,
|
||||
forwardedHrRaw: rawSession.forwardedHrRaw,
|
||||
fullTranscriptUrl: rawSession.fullTranscriptUrl,
|
||||
avgResponseTimeSeconds: rawSession.avgResponseTimeSeconds,
|
||||
tokens: rawSession.tokens,
|
||||
tokensEur: rawSession.tokensEur,
|
||||
category: rawSession.category,
|
||||
initialMessage: rawSession.initialMessage,
|
||||
// Status tracking now handled by ProcessingStatusManager
|
||||
},
|
||||
});
|
||||
importedCount++;
|
||||
} catch (error) {
|
||||
// Log individual session import errors but continue processing
|
||||
process.stderr.write(
|
||||
`Failed to import session ${rawSession.externalSessionId}: ${error}\n`
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Immediately process the queued imports to create Session records
|
||||
console.log('[Refresh API] Processing queued imports...');
|
||||
await processQueuedImports(100); // Process up to 100 imports immediately
|
||||
|
||||
// Count how many sessions were created
|
||||
const sessionCount = await prisma.session.count({
|
||||
where: { companyId: company.id }
|
||||
});
|
||||
|
||||
return NextResponse.json({
|
||||
ok: true,
|
||||
imported: importedCount,
|
||||
total: rawSessionData.length,
|
||||
sessions: sessionCount,
|
||||
message: `Successfully imported ${importedCount} records and processed them into sessions. Total sessions: ${sessionCount}`
|
||||
});
|
||||
} catch (e) {
|
||||
const error = e instanceof Error ? e.message : "An unknown error occurred";
|
||||
return NextResponse.json({ error }, { status: 500 });
|
||||
}
|
||||
}
|
||||
@ -1,105 +0,0 @@
|
||||
import { NextRequest, NextResponse } from "next/server";
|
||||
import { getServerSession } from "next-auth";
|
||||
import { authOptions } from "../../auth/[...nextauth]/route";
|
||||
import { prisma } from "../../../../lib/prisma";
|
||||
import { processUnprocessedSessions } from "../../../../lib/processingScheduler";
|
||||
import { ProcessingStatusManager } from "../../../../lib/processingStatusManager";
|
||||
import { ProcessingStage } from "@prisma/client";
|
||||
|
||||
interface SessionUser {
|
||||
email: string;
|
||||
name?: string;
|
||||
}
|
||||
|
||||
interface SessionData {
|
||||
user: SessionUser;
|
||||
}
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const session = (await getServerSession(authOptions)) as SessionData | null;
|
||||
|
||||
if (!session?.user) {
|
||||
return NextResponse.json({ error: "Not logged in" }, { status: 401 });
|
||||
}
|
||||
|
||||
const user = await prisma.user.findUnique({
|
||||
where: { email: session.user.email },
|
||||
include: { company: true },
|
||||
});
|
||||
|
||||
if (!user) {
|
||||
return NextResponse.json({ error: "No user found" }, { status: 401 });
|
||||
}
|
||||
|
||||
// Check if user has ADMIN role
|
||||
if (user.role !== "ADMIN") {
|
||||
return NextResponse.json(
|
||||
{ error: "Admin access required" },
|
||||
{ status: 403 }
|
||||
);
|
||||
}
|
||||
|
||||
try {
|
||||
// Get optional parameters from request body
|
||||
const body = await request.json();
|
||||
const { batchSize, maxConcurrency } = body;
|
||||
|
||||
// Validate parameters
|
||||
const validatedBatchSize = batchSize && batchSize > 0 ? parseInt(batchSize) : null;
|
||||
const validatedMaxConcurrency = maxConcurrency && maxConcurrency > 0 ? parseInt(maxConcurrency) : 5;
|
||||
|
||||
// Check how many sessions need AI processing using the new status system
|
||||
const sessionsNeedingAI = await ProcessingStatusManager.getSessionsNeedingProcessing(
|
||||
ProcessingStage.AI_ANALYSIS,
|
||||
1000 // Get count only
|
||||
);
|
||||
|
||||
// Filter to sessions for this company
|
||||
const companySessionsNeedingAI = sessionsNeedingAI.filter(
|
||||
statusRecord => statusRecord.session.companyId === user.companyId
|
||||
);
|
||||
|
||||
const unprocessedCount = companySessionsNeedingAI.length;
|
||||
|
||||
if (unprocessedCount === 0) {
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
message: "No sessions requiring AI processing found",
|
||||
unprocessedCount: 0,
|
||||
processedCount: 0,
|
||||
});
|
||||
}
|
||||
|
||||
// Start processing (this will run asynchronously)
|
||||
const startTime = Date.now();
|
||||
|
||||
// Note: We're calling the function but not awaiting it to avoid timeout
|
||||
// The processing will continue in the background
|
||||
processUnprocessedSessions(validatedBatchSize, validatedMaxConcurrency)
|
||||
.then(() => {
|
||||
console.log(`[Manual Trigger] Processing completed for company ${user.companyId}`);
|
||||
})
|
||||
.catch((error) => {
|
||||
console.error(`[Manual Trigger] Processing failed for company ${user.companyId}:`, error);
|
||||
});
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
message: `Started processing ${unprocessedCount} unprocessed sessions`,
|
||||
unprocessedCount,
|
||||
batchSize: validatedBatchSize || unprocessedCount,
|
||||
maxConcurrency: validatedMaxConcurrency,
|
||||
startedAt: new Date().toISOString(),
|
||||
});
|
||||
|
||||
} catch (error) {
|
||||
console.error("[Manual Trigger] Error:", error);
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: "Failed to trigger processing",
|
||||
details: error instanceof Error ? error.message : String(error),
|
||||
},
|
||||
{ status: 500 }
|
||||
);
|
||||
}
|
||||
}
|
||||
@ -1,51 +0,0 @@
|
||||
import { NextRequest, NextResponse } from "next/server";
|
||||
import { getServerSession } from "next-auth";
|
||||
import { prisma } from "../../../../lib/prisma";
|
||||
import { authOptions } from "../../auth/[...nextauth]/route";
|
||||
|
||||
export async function GET(request: NextRequest) {
|
||||
const session = await getServerSession(authOptions);
|
||||
if (!session?.user) {
|
||||
return NextResponse.json({ error: "Not logged in" }, { status: 401 });
|
||||
}
|
||||
|
||||
const user = await prisma.user.findUnique({
|
||||
where: { email: session.user.email as string },
|
||||
});
|
||||
|
||||
if (!user) {
|
||||
return NextResponse.json({ error: "No user" }, { status: 401 });
|
||||
}
|
||||
|
||||
// Get company data
|
||||
const company = await prisma.company.findUnique({
|
||||
where: { id: user.companyId },
|
||||
});
|
||||
|
||||
return NextResponse.json({ company });
|
||||
}
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const session = await getServerSession(authOptions);
|
||||
if (!session?.user) {
|
||||
return NextResponse.json({ error: "Not logged in" }, { status: 401 });
|
||||
}
|
||||
|
||||
const user = await prisma.user.findUnique({
|
||||
where: { email: session.user.email as string },
|
||||
});
|
||||
|
||||
if (!user) {
|
||||
return NextResponse.json({ error: "No user" }, { status: 401 });
|
||||
}
|
||||
|
||||
const body = await request.json();
|
||||
const { csvUrl } = body;
|
||||
|
||||
await prisma.company.update({
|
||||
where: { id: user.companyId },
|
||||
data: { csvUrl },
|
||||
});
|
||||
|
||||
return NextResponse.json({ ok: true });
|
||||
}
|
||||
@ -1,138 +0,0 @@
|
||||
import { NextRequest, NextResponse } from "next/server";
|
||||
import { getServerSession } from "next-auth";
|
||||
import { prisma } from "../../../../lib/prisma";
|
||||
import { sessionMetrics } from "../../../../lib/metrics";
|
||||
import { authOptions } from "../../auth/[...nextauth]/route";
|
||||
import { ChatSession } from "../../../../lib/types";
|
||||
|
||||
interface SessionUser {
|
||||
email: string;
|
||||
name?: string;
|
||||
}
|
||||
|
||||
interface SessionData {
|
||||
user: SessionUser;
|
||||
}
|
||||
|
||||
export async function GET(request: NextRequest) {
|
||||
const session = (await getServerSession(authOptions)) as SessionData | null;
|
||||
if (!session?.user) {
|
||||
return NextResponse.json({ error: "Not logged in" }, { status: 401 });
|
||||
}
|
||||
|
||||
const user = await prisma.user.findUnique({
|
||||
where: { email: session.user.email },
|
||||
include: { company: true },
|
||||
});
|
||||
|
||||
if (!user) {
|
||||
return NextResponse.json({ error: "No user" }, { status: 401 });
|
||||
}
|
||||
|
||||
// Get date range from query parameters
|
||||
const { searchParams } = new URL(request.url);
|
||||
const startDate = searchParams.get("startDate");
|
||||
const endDate = searchParams.get("endDate");
|
||||
|
||||
// Build where clause with optional date filtering
|
||||
const whereClause: any = {
|
||||
companyId: user.companyId,
|
||||
};
|
||||
|
||||
if (startDate && endDate) {
|
||||
whereClause.startTime = {
|
||||
gte: new Date(startDate),
|
||||
lte: new Date(endDate + 'T23:59:59.999Z'), // Include full end date
|
||||
};
|
||||
}
|
||||
|
||||
const prismaSessions = await prisma.session.findMany({
|
||||
where: whereClause,
|
||||
include: {
|
||||
messages: true, // Include messages for question extraction
|
||||
},
|
||||
});
|
||||
|
||||
// Convert Prisma sessions to ChatSession[] type for sessionMetrics
|
||||
const chatSessions: ChatSession[] = prismaSessions.map((ps) => ({
|
||||
id: ps.id, // Map Prisma's id to ChatSession.id
|
||||
sessionId: ps.id, // Map Prisma's id to ChatSession.sessionId
|
||||
companyId: ps.companyId,
|
||||
startTime: new Date(ps.startTime), // Ensure startTime is a Date object
|
||||
endTime: ps.endTime ? new Date(ps.endTime) : null, // Ensure endTime is a Date object or null
|
||||
transcriptContent: "", // Session model doesn't have transcriptContent field
|
||||
createdAt: new Date(ps.createdAt), // Map Prisma's createdAt
|
||||
updatedAt: new Date(ps.createdAt), // Use createdAt for updatedAt as Session model doesn't have updatedAt
|
||||
category: ps.category || undefined,
|
||||
language: ps.language || undefined,
|
||||
country: ps.country || undefined,
|
||||
ipAddress: ps.ipAddress || undefined,
|
||||
sentiment: ps.sentiment === null ? undefined : ps.sentiment,
|
||||
messagesSent: ps.messagesSent === null ? undefined : ps.messagesSent, // Handle null messagesSent
|
||||
avgResponseTime:
|
||||
ps.avgResponseTime === null ? undefined : ps.avgResponseTime,
|
||||
escalated: ps.escalated || false,
|
||||
forwardedHr: ps.forwardedHr || false,
|
||||
initialMsg: ps.initialMsg || undefined,
|
||||
fullTranscriptUrl: ps.fullTranscriptUrl || undefined,
|
||||
summary: ps.summary || undefined, // Include summary field
|
||||
messages: ps.messages || [], // Include messages for question extraction
|
||||
// userId is missing in Prisma Session model, assuming it's not strictly needed for metrics or can be null
|
||||
userId: undefined, // Or some other default/mapping if available
|
||||
}));
|
||||
|
||||
// Pass company config to metrics
|
||||
const companyConfigForMetrics = {
|
||||
sentimentAlert:
|
||||
user.company.sentimentAlert === null
|
||||
? undefined
|
||||
: user.company.sentimentAlert,
|
||||
};
|
||||
|
||||
const metrics = sessionMetrics(chatSessions, companyConfigForMetrics);
|
||||
|
||||
// Calculate date range from the FILTERED sessions to match what's actually displayed
|
||||
let dateRange: { minDate: string; maxDate: string } | null = null;
|
||||
let availableDataRange: { minDate: string; maxDate: string } | null = null;
|
||||
|
||||
// Get the full available range for reference
|
||||
const allSessions = await prisma.session.findMany({
|
||||
where: {
|
||||
companyId: user.companyId,
|
||||
},
|
||||
select: {
|
||||
startTime: true,
|
||||
},
|
||||
orderBy: {
|
||||
startTime: 'asc',
|
||||
},
|
||||
});
|
||||
|
||||
if (allSessions.length > 0) {
|
||||
availableDataRange = {
|
||||
minDate: allSessions[0].startTime.toISOString().split('T')[0], // First session date
|
||||
maxDate: allSessions[allSessions.length - 1].startTime.toISOString().split('T')[0] // Last session date
|
||||
};
|
||||
}
|
||||
|
||||
// Calculate date range from the filtered sessions (what's actually being displayed)
|
||||
if (prismaSessions.length > 0) {
|
||||
const sortedFilteredSessions = prismaSessions.sort((a, b) =>
|
||||
new Date(a.startTime).getTime() - new Date(b.startTime).getTime()
|
||||
);
|
||||
dateRange = {
|
||||
minDate: sortedFilteredSessions[0].startTime.toISOString().split('T')[0],
|
||||
maxDate: sortedFilteredSessions[sortedFilteredSessions.length - 1].startTime.toISOString().split('T')[0]
|
||||
};
|
||||
} else if (availableDataRange) {
|
||||
// If no filtered sessions but we have available data, use the available range
|
||||
dateRange = availableDataRange;
|
||||
}
|
||||
|
||||
return NextResponse.json({
|
||||
metrics,
|
||||
csvUrl: user.company.csvUrl,
|
||||
company: user.company,
|
||||
dateRange,
|
||||
});
|
||||
}
|
||||
@ -1,36 +0,0 @@
|
||||
import { NextRequest, NextResponse } from "next/server";
|
||||
import { getServerSession } from "next-auth";
|
||||
import { prisma } from "../../../../lib/prisma";
|
||||
import { authOptions } from "../../auth/[...nextauth]/route";
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const session = await getServerSession(authOptions);
|
||||
if (!session?.user || session.user.role !== "ADMIN") {
|
||||
return NextResponse.json({ error: "Forbidden" }, { status: 403 });
|
||||
}
|
||||
|
||||
const user = await prisma.user.findUnique({
|
||||
where: { email: session.user.email as string },
|
||||
});
|
||||
|
||||
if (!user) {
|
||||
return NextResponse.json({ error: "No user" }, { status: 401 });
|
||||
}
|
||||
|
||||
const body = await request.json();
|
||||
const { csvUrl, csvUsername, csvPassword, sentimentThreshold } = body;
|
||||
|
||||
await prisma.company.update({
|
||||
where: { id: user.companyId },
|
||||
data: {
|
||||
csvUrl,
|
||||
csvUsername,
|
||||
...(csvPassword ? { csvPassword } : {}),
|
||||
sentimentAlert: sentimentThreshold
|
||||
? parseFloat(sentimentThreshold)
|
||||
: null,
|
||||
},
|
||||
});
|
||||
|
||||
return NextResponse.json({ ok: true });
|
||||
}
|
||||
@ -1,80 +0,0 @@
|
||||
import { NextRequest, NextResponse } from "next/server";
|
||||
import crypto from "crypto";
|
||||
import { getServerSession } from "next-auth";
|
||||
import { prisma } from "../../../../lib/prisma";
|
||||
import bcrypt from "bcryptjs";
|
||||
import { authOptions } from "../../auth/[...nextauth]/route";
|
||||
|
||||
interface UserBasicInfo {
|
||||
id: string;
|
||||
email: string;
|
||||
role: string;
|
||||
}
|
||||
|
||||
export async function GET(request: NextRequest) {
|
||||
const session = await getServerSession(authOptions);
|
||||
if (!session?.user || session.user.role !== "ADMIN") {
|
||||
return NextResponse.json({ error: "Forbidden" }, { status: 403 });
|
||||
}
|
||||
|
||||
const user = await prisma.user.findUnique({
|
||||
where: { email: session.user.email as string },
|
||||
});
|
||||
|
||||
if (!user) {
|
||||
return NextResponse.json({ error: "No user" }, { status: 401 });
|
||||
}
|
||||
|
||||
const users = await prisma.user.findMany({
|
||||
where: { companyId: user.companyId },
|
||||
});
|
||||
|
||||
const mappedUsers: UserBasicInfo[] = users.map((u) => ({
|
||||
id: u.id,
|
||||
email: u.email,
|
||||
role: u.role,
|
||||
}));
|
||||
|
||||
return NextResponse.json({ users: mappedUsers });
|
||||
}
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const session = await getServerSession(authOptions);
|
||||
if (!session?.user || session.user.role !== "ADMIN") {
|
||||
return NextResponse.json({ error: "Forbidden" }, { status: 403 });
|
||||
}
|
||||
|
||||
const user = await prisma.user.findUnique({
|
||||
where: { email: session.user.email as string },
|
||||
});
|
||||
|
||||
if (!user) {
|
||||
return NextResponse.json({ error: "No user" }, { status: 401 });
|
||||
}
|
||||
|
||||
const body = await request.json();
|
||||
const { email, role } = body;
|
||||
|
||||
if (!email || !role) {
|
||||
return NextResponse.json({ error: "Missing fields" }, { status: 400 });
|
||||
}
|
||||
|
||||
const exists = await prisma.user.findUnique({ where: { email } });
|
||||
if (exists) {
|
||||
return NextResponse.json({ error: "Email exists" }, { status: 409 });
|
||||
}
|
||||
|
||||
const tempPassword = crypto.randomBytes(12).toString("base64").slice(0, 12); // secure random initial password
|
||||
|
||||
await prisma.user.create({
|
||||
data: {
|
||||
email,
|
||||
password: await bcrypt.hash(tempPassword, 10),
|
||||
companyId: user.companyId,
|
||||
role,
|
||||
},
|
||||
});
|
||||
|
||||
// TODO: Email user their temp password (stub, for demo) - Implement a robust and secure email sending mechanism. Consider using a transactional email service.
|
||||
return NextResponse.json({ ok: true, tempPassword });
|
||||
}
|
||||
@ -1,28 +0,0 @@
|
||||
import { NextRequest, NextResponse } from "next/server";
|
||||
import { prisma } from "../../../lib/prisma";
|
||||
import { sendEmail } from "../../../lib/sendEmail";
|
||||
import crypto from "crypto";
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const body = await request.json();
|
||||
const { email } = body as { email: string };
|
||||
|
||||
const user = await prisma.user.findUnique({ where: { email } });
|
||||
if (!user) {
|
||||
// Always return 200 for privacy (don't reveal if email exists)
|
||||
return NextResponse.json({ success: true }, { status: 200 });
|
||||
}
|
||||
|
||||
const token = crypto.randomBytes(32).toString("hex");
|
||||
const expiry = new Date(Date.now() + 1000 * 60 * 30); // 30 min expiry
|
||||
|
||||
await prisma.user.update({
|
||||
where: { email },
|
||||
data: { resetToken: token, resetTokenExpiry: expiry },
|
||||
});
|
||||
|
||||
const resetUrl = `${process.env.NEXTAUTH_URL || "http://localhost:3000"}/reset-password?token=${token}`;
|
||||
await sendEmail(email, "Password Reset", `Reset your password: ${resetUrl}`);
|
||||
|
||||
return NextResponse.json({ success: true }, { status: 200 });
|
||||
}
|
||||
@ -1,63 +0,0 @@
|
||||
import { NextRequest, NextResponse } from "next/server";
|
||||
import { prisma } from "../../../lib/prisma";
|
||||
import bcrypt from "bcryptjs";
|
||||
|
||||
interface RegisterRequestBody {
|
||||
email: string;
|
||||
password: string;
|
||||
company: string;
|
||||
csvUrl?: string;
|
||||
}
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const body = await request.json();
|
||||
const { email, password, company, csvUrl } = body as RegisterRequestBody;
|
||||
|
||||
if (!email || !password || !company) {
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: "Missing required fields",
|
||||
},
|
||||
{ status: 400 }
|
||||
);
|
||||
}
|
||||
|
||||
// Check if email exists
|
||||
const exists = await prisma.user.findUnique({
|
||||
where: { email },
|
||||
});
|
||||
|
||||
if (exists) {
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: "Email already exists",
|
||||
},
|
||||
{ status: 409 }
|
||||
);
|
||||
}
|
||||
|
||||
const newCompany = await prisma.company.create({
|
||||
data: { name: company, csvUrl: csvUrl || "" },
|
||||
});
|
||||
|
||||
const hashed = await bcrypt.hash(password, 10);
|
||||
|
||||
await prisma.user.create({
|
||||
data: {
|
||||
email,
|
||||
password: hashed,
|
||||
companyId: newCompany.id,
|
||||
role: "ADMIN",
|
||||
},
|
||||
});
|
||||
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: true,
|
||||
data: { success: true },
|
||||
},
|
||||
{ status: 201 }
|
||||
);
|
||||
}
|
||||
@ -1,63 +0,0 @@
|
||||
import { NextRequest, NextResponse } from "next/server";
|
||||
import { prisma } from "../../../lib/prisma";
|
||||
import bcrypt from "bcryptjs";
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const body = await request.json();
|
||||
const { token, password } = body as { token?: string; password?: string };
|
||||
|
||||
if (!token || !password) {
|
||||
return NextResponse.json(
|
||||
{ error: "Token and password are required." },
|
||||
{ status: 400 }
|
||||
);
|
||||
}
|
||||
|
||||
if (password.length < 8) {
|
||||
return NextResponse.json(
|
||||
{ error: "Password must be at least 8 characters long." },
|
||||
{ status: 400 }
|
||||
);
|
||||
}
|
||||
|
||||
try {
|
||||
const user = await prisma.user.findFirst({
|
||||
where: {
|
||||
resetToken: token,
|
||||
resetTokenExpiry: { gte: new Date() },
|
||||
},
|
||||
});
|
||||
|
||||
if (!user) {
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: "Invalid or expired token. Please request a new password reset.",
|
||||
},
|
||||
{ status: 400 }
|
||||
);
|
||||
}
|
||||
|
||||
const hash = await bcrypt.hash(password, 10);
|
||||
await prisma.user.update({
|
||||
where: { id: user.id },
|
||||
data: {
|
||||
password: hash,
|
||||
resetToken: null,
|
||||
resetTokenExpiry: null,
|
||||
},
|
||||
});
|
||||
|
||||
return NextResponse.json(
|
||||
{ message: "Password has been reset successfully." },
|
||||
{ status: 200 }
|
||||
);
|
||||
} catch (error) {
|
||||
console.error("Reset password error:", error);
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: "An internal server error occurred. Please try again later.",
|
||||
},
|
||||
{ status: 500 }
|
||||
);
|
||||
}
|
||||
}
|
||||
@ -4,6 +4,10 @@ import { useState, useEffect } from "react";
|
||||
import { useSession } from "next-auth/react";
|
||||
import { Company } from "../../../lib/types";
|
||||
|
||||
interface CompanyConfigResponse {
|
||||
company: Company;
|
||||
}
|
||||
|
||||
export default function CompanySettingsPage() {
|
||||
const { data: session, status } = useSession();
|
||||
// We store the full company object for future use and updates after save operations
|
||||
@ -22,7 +26,7 @@ export default function CompanySettingsPage() {
|
||||
setLoading(true);
|
||||
try {
|
||||
const res = await fetch("/api/dashboard/config");
|
||||
const data = await res.json();
|
||||
const data = (await res.json()) as CompanyConfigResponse;
|
||||
setCompany(data.company);
|
||||
setCsvUrl(data.company.csvUrl || "");
|
||||
setCsvUsername(data.company.csvUsername || "");
|
||||
@ -58,10 +62,10 @@ export default function CompanySettingsPage() {
|
||||
if (res.ok) {
|
||||
setMessage("Settings saved successfully!");
|
||||
// Update local state if needed
|
||||
const data = await res.json();
|
||||
const data = (await res.json()) as CompanyConfigResponse;
|
||||
setCompany(data.company);
|
||||
} else {
|
||||
const error = await res.json();
|
||||
const error = (await res.json()) as { message?: string; };
|
||||
setMessage(
|
||||
`Failed to save settings: ${error.message || "Unknown error"}`
|
||||
);
|
||||
@ -77,8 +81,8 @@ export default function CompanySettingsPage() {
|
||||
return <div className="text-center py-10">Loading settings...</div>;
|
||||
}
|
||||
|
||||
// Check for ADMIN access
|
||||
if (session?.user?.role !== "ADMIN") {
|
||||
// Check for admin access
|
||||
if (session?.user?.role !== "admin") {
|
||||
return (
|
||||
<div className="text-center py-10 bg-white rounded-xl shadow p-6">
|
||||
<h2 className="font-bold text-xl text-red-600 mb-2">Access Denied</h2>
|
||||
|
||||
@ -1,117 +1,71 @@
|
||||
"use client";
|
||||
|
||||
import { useEffect, useState, useCallback, useRef } from "react";
|
||||
import { useEffect, useState } from "react";
|
||||
import { signOut, useSession } from "next-auth/react";
|
||||
import { useRouter } from "next/navigation";
|
||||
import {
|
||||
SessionsLineChart,
|
||||
CategoriesBarChart,
|
||||
LanguagePieChart,
|
||||
TokenUsageChart,
|
||||
} from "../../../components/Charts";
|
||||
import { Company, MetricsResult, WordCloudWord } from "../../../lib/types";
|
||||
import MetricCard from "../../../components/ui/metric-card";
|
||||
import ModernLineChart from "../../../components/charts/line-chart";
|
||||
import ModernBarChart from "../../../components/charts/bar-chart";
|
||||
import ModernDonutChart from "../../../components/charts/donut-chart";
|
||||
import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card";
|
||||
import { Button } from "@/components/ui/button";
|
||||
import { Badge } from "@/components/ui/badge";
|
||||
import { Skeleton } from "@/components/ui/skeleton";
|
||||
import { Separator } from "@/components/ui/separator";
|
||||
import {
|
||||
DropdownMenu,
|
||||
DropdownMenuContent,
|
||||
DropdownMenuItem,
|
||||
DropdownMenuTrigger,
|
||||
} from "@/components/ui/dropdown-menu";
|
||||
import {
|
||||
MessageSquare,
|
||||
Users,
|
||||
Clock,
|
||||
Zap,
|
||||
Euro,
|
||||
TrendingUp,
|
||||
CheckCircle,
|
||||
RefreshCw,
|
||||
LogOut,
|
||||
Calendar,
|
||||
MoreVertical,
|
||||
Globe,
|
||||
MessageCircle,
|
||||
} from "lucide-react";
|
||||
import MetricCard from "../../../components/MetricCard";
|
||||
import DonutChart from "../../../components/DonutChart";
|
||||
import WordCloud from "../../../components/WordCloud";
|
||||
import GeographicMap from "../../../components/GeographicMap";
|
||||
import ResponseTimeDistribution from "../../../components/ResponseTimeDistribution";
|
||||
import DateRangePicker from "../../../components/DateRangePicker";
|
||||
import TopQuestionsChart from "../../../components/TopQuestionsChart";
|
||||
import WelcomeBanner from "../../../components/WelcomeBanner";
|
||||
|
||||
interface MetricsApiResponse {
|
||||
metrics: MetricsResult;
|
||||
company: Company;
|
||||
}
|
||||
|
||||
// Safely wrapped component with useSession
|
||||
function DashboardContent() {
|
||||
const { data: session, status } = useSession();
|
||||
const router = useRouter();
|
||||
const { data: session, status } = useSession(); // Add status from useSession
|
||||
const router = useRouter(); // Initialize useRouter
|
||||
const [metrics, setMetrics] = useState<MetricsResult | null>(null);
|
||||
const [company, setCompany] = useState<Company | null>(null);
|
||||
const [loading, setLoading] = useState<boolean>(false);
|
||||
const [, setLoading] = useState<boolean>(false);
|
||||
const [refreshing, setRefreshing] = useState<boolean>(false);
|
||||
const [dateRange, setDateRange] = useState<{ minDate: string; maxDate: string } | null>(null);
|
||||
const [selectedStartDate, setSelectedStartDate] = useState<string>("");
|
||||
const [selectedEndDate, setSelectedEndDate] = useState<string>("");
|
||||
const [isInitialLoad, setIsInitialLoad] = useState<boolean>(true);
|
||||
|
||||
const isAuditor = session?.user?.role === "AUDITOR";
|
||||
|
||||
// Function to fetch metrics with optional date range
|
||||
const fetchMetrics = async (startDate?: string, endDate?: string, isInitial = false) => {
|
||||
setLoading(true);
|
||||
try {
|
||||
let url = "/api/dashboard/metrics";
|
||||
if (startDate && endDate) {
|
||||
url += `?startDate=${startDate}&endDate=${endDate}`;
|
||||
}
|
||||
|
||||
const res = await fetch(url);
|
||||
const data = await res.json();
|
||||
|
||||
setMetrics(data.metrics);
|
||||
setCompany(data.company);
|
||||
|
||||
// Set date range from API response (only on initial load)
|
||||
if (data.dateRange && isInitial) {
|
||||
setDateRange(data.dateRange);
|
||||
setSelectedStartDate(data.dateRange.minDate);
|
||||
setSelectedEndDate(data.dateRange.maxDate);
|
||||
setIsInitialLoad(false);
|
||||
}
|
||||
} catch (error) {
|
||||
console.error("Error fetching metrics:", error);
|
||||
} finally {
|
||||
setLoading(false);
|
||||
}
|
||||
};
|
||||
|
||||
// Handle date range changes with proper memoization
|
||||
const handleDateRangeChange = useCallback((startDate: string, endDate: string) => {
|
||||
// Only update if dates actually changed to prevent unnecessary API calls
|
||||
if (startDate !== selectedStartDate || endDate !== selectedEndDate) {
|
||||
setSelectedStartDate(startDate);
|
||||
setSelectedEndDate(endDate);
|
||||
fetchMetrics(startDate, endDate);
|
||||
}
|
||||
}, [selectedStartDate, selectedEndDate]);
|
||||
const isAuditor = session?.user?.role === "auditor";
|
||||
|
||||
useEffect(() => {
|
||||
// Redirect if not authenticated
|
||||
if (status === "unauthenticated") {
|
||||
router.push("/login");
|
||||
return;
|
||||
return; // Stop further execution in this effect
|
||||
}
|
||||
|
||||
// Fetch metrics and company on mount if authenticated
|
||||
if (status === "authenticated" && isInitialLoad) {
|
||||
fetchMetrics(undefined, undefined, true);
|
||||
if (status === "authenticated") {
|
||||
const fetchData = async () => {
|
||||
setLoading(true);
|
||||
const res = await fetch("/api/dashboard/metrics");
|
||||
const data = (await res.json()) as MetricsApiResponse;
|
||||
console.log("Metrics from API:", {
|
||||
avgSessionLength: data.metrics.avgSessionLength,
|
||||
avgSessionTimeTrend: data.metrics.avgSessionTimeTrend,
|
||||
totalSessionDuration: data.metrics.totalSessionDuration,
|
||||
validSessionsForDuration: data.metrics.validSessionsForDuration,
|
||||
});
|
||||
setMetrics(data.metrics);
|
||||
setCompany(data.company);
|
||||
setLoading(false);
|
||||
};
|
||||
fetchData();
|
||||
}
|
||||
}, [status, router, isInitialLoad]);
|
||||
}, [status, router]); // Add status and router to dependency array
|
||||
|
||||
async function handleRefresh() {
|
||||
if (isAuditor) return;
|
||||
if (isAuditor) return; // Prevent auditors from refreshing
|
||||
try {
|
||||
setRefreshing(true);
|
||||
|
||||
// Make sure we have a company ID to send
|
||||
if (!company?.id) {
|
||||
setRefreshing(false);
|
||||
alert("Cannot refresh: Company ID is missing");
|
||||
@ -125,11 +79,12 @@ function DashboardContent() {
|
||||
});
|
||||
|
||||
if (res.ok) {
|
||||
// Refetch metrics
|
||||
const metricsRes = await fetch("/api/dashboard/metrics");
|
||||
const data = await metricsRes.json();
|
||||
const data = (await metricsRes.json()) as MetricsApiResponse;
|
||||
setMetrics(data.metrics);
|
||||
} else {
|
||||
const errorData = await res.json();
|
||||
const errorData = (await res.json()) as { error: string; };
|
||||
alert(`Failed to refresh sessions: ${errorData.error}`);
|
||||
}
|
||||
} finally {
|
||||
@ -137,129 +92,70 @@ function DashboardContent() {
|
||||
}
|
||||
}
|
||||
|
||||
// Calculate sentiment distribution
|
||||
const getSentimentData = () => {
|
||||
if (!metrics) return { positive: 0, neutral: 0, negative: 0 };
|
||||
|
||||
if (
|
||||
metrics.sentimentPositiveCount !== undefined &&
|
||||
metrics.sentimentNeutralCount !== undefined &&
|
||||
metrics.sentimentNegativeCount !== undefined
|
||||
) {
|
||||
return {
|
||||
positive: metrics.sentimentPositiveCount,
|
||||
neutral: metrics.sentimentNeutralCount,
|
||||
negative: metrics.sentimentNegativeCount,
|
||||
};
|
||||
}
|
||||
|
||||
const total = metrics.totalSessions || 1;
|
||||
return {
|
||||
positive: Math.round(total * 0.6),
|
||||
neutral: Math.round(total * 0.3),
|
||||
negative: Math.round(total * 0.1),
|
||||
};
|
||||
};
|
||||
|
||||
// Prepare token usage data
|
||||
const getTokenData = () => {
|
||||
if (!metrics || !metrics.tokensByDay) {
|
||||
return { labels: [], values: [], costs: [] };
|
||||
}
|
||||
|
||||
const days = Object.keys(metrics.tokensByDay).sort();
|
||||
const labels = days.slice(-7);
|
||||
const values = labels.map((day) => metrics.tokensByDay?.[day] || 0);
|
||||
const costs = labels.map((day) => metrics.tokensCostByDay?.[day] || 0);
|
||||
|
||||
return { labels, values, costs };
|
||||
};
|
||||
|
||||
// Show loading state while session status is being determined
|
||||
if (status === "loading") {
|
||||
return (
|
||||
<div className="flex items-center justify-center min-h-[60vh]">
|
||||
<div className="text-center space-y-4">
|
||||
<div className="animate-spin rounded-full h-12 w-12 border-b-2 border-primary mx-auto"></div>
|
||||
<p className="text-muted-foreground">Loading session...</p>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
return <div className="text-center py-10">Loading session...</div>;
|
||||
}
|
||||
|
||||
// If unauthenticated and not redirected yet (should be handled by useEffect, but as a fallback)
|
||||
if (status === "unauthenticated") {
|
||||
return (
|
||||
<div className="flex items-center justify-center min-h-[60vh]">
|
||||
<div className="text-center">
|
||||
<p className="text-muted-foreground">Redirecting to login...</p>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
return <div className="text-center py-10">Redirecting to login...</div>;
|
||||
}
|
||||
|
||||
if (loading || !metrics || !company) {
|
||||
return (
|
||||
<div className="space-y-8">
|
||||
{/* Header Skeleton */}
|
||||
<Card>
|
||||
<CardHeader>
|
||||
<div className="flex justify-between items-start">
|
||||
<div className="space-y-2">
|
||||
<Skeleton className="h-8 w-48" />
|
||||
<Skeleton className="h-4 w-64" />
|
||||
</div>
|
||||
<div className="flex gap-2">
|
||||
<Skeleton className="h-10 w-24" />
|
||||
<Skeleton className="h-10 w-20" />
|
||||
</div>
|
||||
</div>
|
||||
</CardHeader>
|
||||
</Card>
|
||||
|
||||
{/* Metrics Grid Skeleton */}
|
||||
<div className="grid grid-cols-1 sm:grid-cols-2 lg:grid-cols-4 gap-6">
|
||||
{Array.from({ length: 8 }).map((_, i) => (
|
||||
<MetricCard key={i} title="" value="" isLoading />
|
||||
))}
|
||||
</div>
|
||||
|
||||
{/* Charts Skeleton */}
|
||||
<div className="grid grid-cols-1 lg:grid-cols-3 gap-6">
|
||||
<Card className="lg:col-span-2">
|
||||
<CardHeader>
|
||||
<Skeleton className="h-6 w-32" />
|
||||
</CardHeader>
|
||||
<CardContent>
|
||||
<Skeleton className="h-64 w-full" />
|
||||
</CardContent>
|
||||
</Card>
|
||||
<Card>
|
||||
<CardHeader>
|
||||
<Skeleton className="h-6 w-32" />
|
||||
</CardHeader>
|
||||
<CardContent>
|
||||
<Skeleton className="h-64 w-full" />
|
||||
</CardContent>
|
||||
</Card>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
if (!metrics || !company) {
|
||||
return <div className="text-center py-10">Loading dashboard...</div>;
|
||||
}
|
||||
|
||||
// Data preparation functions
|
||||
const getSentimentData = () => {
|
||||
if (!metrics) return [];
|
||||
|
||||
const sentimentData = {
|
||||
positive: metrics.sentimentPositiveCount ?? 0,
|
||||
neutral: metrics.sentimentNeutralCount ?? 0,
|
||||
negative: metrics.sentimentNegativeCount ?? 0,
|
||||
};
|
||||
|
||||
return [
|
||||
{ name: "Positive", value: sentimentData.positive, color: "rgb(34, 197, 94)" },
|
||||
{ name: "Neutral", value: sentimentData.neutral, color: "rgb(168, 162, 158)" },
|
||||
{ name: "Negative", value: sentimentData.negative, color: "rgb(239, 68, 68)" },
|
||||
];
|
||||
};
|
||||
|
||||
const getSessionsOverTimeData = () => {
|
||||
if (!metrics?.days) return [];
|
||||
|
||||
return Object.entries(metrics.days).map(([date, value]) => ({
|
||||
date: new Date(date).toLocaleDateString('en-US', { month: 'short', day: 'numeric' }),
|
||||
value: value as number,
|
||||
}));
|
||||
};
|
||||
|
||||
const getCategoriesData = () => {
|
||||
if (!metrics?.categories) return [];
|
||||
|
||||
return Object.entries(metrics.categories).map(([name, value]) => ({
|
||||
name: name.length > 15 ? name.substring(0, 15) + '...' : name,
|
||||
value: value as number,
|
||||
}));
|
||||
};
|
||||
|
||||
const getLanguagesData = () => {
|
||||
if (!metrics?.languages) return [];
|
||||
|
||||
return Object.entries(metrics.languages).map(([name, value]) => ({
|
||||
name,
|
||||
value: value as number,
|
||||
}));
|
||||
};
|
||||
|
||||
// Function to prepare word cloud data from metrics.wordCloudData
|
||||
const getWordCloudData = (): WordCloudWord[] => {
|
||||
if (!metrics?.wordCloudData) return [];
|
||||
if (!metrics || !metrics.wordCloudData) return [];
|
||||
return metrics.wordCloudData;
|
||||
};
|
||||
|
||||
// Function to prepare country data for the map using actual metrics
|
||||
const getCountryData = () => {
|
||||
if (!metrics?.countries) return {};
|
||||
return Object.entries(metrics.countries).reduce(
|
||||
if (!metrics || !metrics.countries) return {};
|
||||
|
||||
// Convert the countries object from metrics to the format expected by GeographicMap
|
||||
const result = Object.entries(metrics.countries).reduce(
|
||||
(acc, [code, count]) => {
|
||||
if (code && count) {
|
||||
acc[code] = count;
|
||||
@ -268,8 +164,11 @@ function DashboardContent() {
|
||||
},
|
||||
{} as Record<string, number>
|
||||
);
|
||||
|
||||
return result;
|
||||
};
|
||||
|
||||
// Function to prepare response time distribution data
|
||||
const getResponseTimeData = () => {
|
||||
const avgTime = metrics.avgResponseTime || 1.5;
|
||||
const simulatedData: number[] = [];
|
||||
@ -284,318 +183,252 @@ function DashboardContent() {
|
||||
|
||||
return (
|
||||
<div className="space-y-8">
|
||||
{/* Apple-Style Unified Header */}
|
||||
<Card className="border-0 bg-white shadow-sm">
|
||||
<CardHeader className="pb-6">
|
||||
<div className="flex flex-col space-y-6">
|
||||
{/* Top row: Company info and actions */}
|
||||
<div className="flex flex-col sm:flex-row justify-between items-start sm:items-center gap-4">
|
||||
<div className="space-y-1">
|
||||
<div className="flex items-center gap-3">
|
||||
<h1 className="text-2xl font-semibold text-gray-900 tracking-tight">{company.name}</h1>
|
||||
<Badge variant="secondary" className="text-xs font-medium bg-gray-100 text-gray-700 border-0">
|
||||
Analytics Dashboard
|
||||
</Badge>
|
||||
</div>
|
||||
<p className="text-sm text-gray-500">
|
||||
Last updated{" "}
|
||||
<span className="font-medium text-gray-700">
|
||||
{new Date(metrics.lastUpdated || Date.now()).toLocaleString()}
|
||||
</span>
|
||||
</p>
|
||||
</div>
|
||||
|
||||
<div className="flex items-center gap-3">
|
||||
<Button
|
||||
onClick={handleRefresh}
|
||||
disabled={refreshing || isAuditor}
|
||||
size="sm"
|
||||
className="gap-2 bg-blue-600 hover:bg-blue-700 border-0 shadow-sm"
|
||||
<WelcomeBanner companyName={company.name} />
|
||||
<div className="flex flex-col sm:flex-row justify-between items-start sm:items-center bg-white p-6 rounded-2xl shadow-lg ring-1 ring-slate-200/50">
|
||||
<div>
|
||||
<h1 className="text-3xl font-bold text-slate-800">{company.name}</h1>
|
||||
<p className="text-slate-500 mt-1">
|
||||
Dashboard updated{" "}
|
||||
<span className="font-medium text-slate-600">
|
||||
{new Date(metrics.lastUpdated || Date.now()).toLocaleString()}
|
||||
</span>
|
||||
</p>
|
||||
</div>
|
||||
<div className="flex items-center gap-3 mt-4 sm:mt-0">
|
||||
<button
|
||||
className="bg-sky-600 text-white py-2 px-5 rounded-lg shadow hover:bg-sky-700 transition-colors disabled:opacity-60 disabled:cursor-not-allowed flex items-center text-sm font-medium"
|
||||
onClick={handleRefresh}
|
||||
disabled={refreshing || isAuditor}
|
||||
>
|
||||
{refreshing ? (
|
||||
<>
|
||||
<svg
|
||||
className="animate-spin -ml-1 mr-2 h-4 w-4 text-white"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
fill="none"
|
||||
viewBox="0 0 24 24"
|
||||
>
|
||||
<RefreshCw className={`h-4 w-4 ${refreshing ? 'animate-spin' : ''}`} />
|
||||
{refreshing ? "Refreshing..." : "Refresh"}
|
||||
</Button>
|
||||
|
||||
<DropdownMenu>
|
||||
<DropdownMenuTrigger asChild>
|
||||
<Button variant="outline" size="sm" className="border-gray-200 hover:bg-gray-50">
|
||||
<MoreVertical className="h-4 w-4" />
|
||||
</Button>
|
||||
</DropdownMenuTrigger>
|
||||
<DropdownMenuContent align="end" className="border-gray-200 shadow-lg">
|
||||
<DropdownMenuItem onClick={() => signOut({ callbackUrl: "/login" })}>
|
||||
<LogOut className="h-4 w-4 mr-2" />
|
||||
Sign out
|
||||
</DropdownMenuItem>
|
||||
</DropdownMenuContent>
|
||||
</DropdownMenu>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Date Range Controls */}
|
||||
{dateRange && (
|
||||
<div className="border-t border-gray-100 pt-6">
|
||||
<div className="flex flex-col sm:flex-row items-start sm:items-center gap-4">
|
||||
<div className="flex items-center gap-2">
|
||||
<Calendar className="h-4 w-4 text-gray-500" />
|
||||
<span className="text-sm font-medium text-gray-700">Date Range:</span>
|
||||
</div>
|
||||
<div className="flex items-center gap-3">
|
||||
<div className="flex items-center gap-2">
|
||||
<label className="text-sm text-gray-600">From:</label>
|
||||
<input
|
||||
type="date"
|
||||
value={selectedStartDate}
|
||||
min={dateRange.minDate}
|
||||
max={dateRange.maxDate}
|
||||
onChange={(e) => handleDateRangeChange(e.target.value, selectedEndDate)}
|
||||
className="px-3 py-1.5 text-sm border border-gray-200 rounded-lg focus:outline-none focus:ring-2 focus:ring-blue-500 focus:border-transparent"
|
||||
/>
|
||||
</div>
|
||||
<div className="flex items-center gap-2">
|
||||
<label className="text-sm text-gray-600">To:</label>
|
||||
<input
|
||||
type="date"
|
||||
value={selectedEndDate}
|
||||
min={dateRange.minDate}
|
||||
max={dateRange.maxDate}
|
||||
onChange={(e) => handleDateRangeChange(selectedStartDate, e.target.value)}
|
||||
className="px-3 py-1.5 text-sm border border-gray-200 rounded-lg focus:outline-none focus:ring-2 focus:ring-blue-500 focus:border-transparent"
|
||||
/>
|
||||
</div>
|
||||
<div className="flex gap-2">
|
||||
<Button
|
||||
variant="outline"
|
||||
size="sm"
|
||||
onClick={() => {
|
||||
const endDate = new Date().toISOString().split('T')[0];
|
||||
const startDate = new Date(Date.now() - 7 * 24 * 60 * 60 * 1000).toISOString().split('T')[0];
|
||||
handleDateRangeChange(startDate, endDate);
|
||||
}}
|
||||
className="text-xs border-gray-200 hover:bg-gray-50"
|
||||
>
|
||||
Last 7 days
|
||||
</Button>
|
||||
<Button
|
||||
variant="outline"
|
||||
size="sm"
|
||||
onClick={() => {
|
||||
const endDate = new Date().toISOString().split('T')[0];
|
||||
const startDate = new Date(Date.now() - 30 * 24 * 60 * 60 * 1000).toISOString().split('T')[0];
|
||||
handleDateRangeChange(startDate, endDate);
|
||||
}}
|
||||
className="text-xs border-gray-200 hover:bg-gray-50"
|
||||
>
|
||||
Last 30 days
|
||||
</Button>
|
||||
<Button
|
||||
variant="outline"
|
||||
size="sm"
|
||||
onClick={() => handleDateRangeChange(dateRange.minDate, dateRange.maxDate)}
|
||||
className="text-xs border-gray-200 hover:bg-gray-50"
|
||||
>
|
||||
All time
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<p className="text-xs text-gray-500 mt-2">
|
||||
Available data: {new Date(dateRange.minDate).toLocaleDateString()} - {new Date(dateRange.maxDate).toLocaleDateString()}
|
||||
</p>
|
||||
</div>
|
||||
<circle
|
||||
className="opacity-25"
|
||||
cx="12"
|
||||
cy="12"
|
||||
r="10"
|
||||
stroke="currentColor"
|
||||
strokeWidth="4"
|
||||
></circle>
|
||||
<path
|
||||
className="opacity-75"
|
||||
fill="currentColor"
|
||||
d="M4 12a8 8 0 018-8V0C5.373 0 0 5.373 0 12h4zm2 5.291A7.962 7.962 0 014 12H0c0 3.042 1.135 5.824 3 7.938l3-2.647z"
|
||||
></path>
|
||||
</svg>
|
||||
Refreshing...
|
||||
</>
|
||||
) : (
|
||||
"Refresh Data"
|
||||
)}
|
||||
</div>
|
||||
</CardHeader>
|
||||
</Card>
|
||||
|
||||
{/* Modern Metrics Grid */}
|
||||
<div className="grid grid-cols-1 sm:grid-cols-2 lg:grid-cols-4 gap-6">
|
||||
</button>
|
||||
<button
|
||||
className="bg-slate-100 text-slate-700 py-2 px-5 rounded-lg shadow hover:bg-slate-200 transition-colors flex items-center text-sm font-medium"
|
||||
onClick={() => signOut({ callbackUrl: "/login" })}
|
||||
>
|
||||
Sign out
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
<div className="grid grid-cols-1 sm:grid-cols-2 lg:grid-cols-4 gap-4">
|
||||
<MetricCard
|
||||
title="Total Sessions"
|
||||
value={metrics.totalSessions?.toLocaleString()}
|
||||
icon={<MessageSquare className="h-5 w-5" />}
|
||||
value={metrics.totalSessions}
|
||||
icon={
|
||||
<svg
|
||||
className="h-5 w-5"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
fill="none"
|
||||
viewBox="0 0 24 24"
|
||||
stroke="currentColor"
|
||||
strokeWidth="1"
|
||||
>
|
||||
<path
|
||||
strokeLinecap="round"
|
||||
strokeLinejoin="round"
|
||||
d="M7 20l4-16m2 16l4-16M6 9h14M4 15h14"
|
||||
/>
|
||||
</svg>
|
||||
}
|
||||
trend={{
|
||||
value: metrics.sessionTrend ?? 0,
|
||||
isPositive: (metrics.sessionTrend ?? 0) >= 0,
|
||||
}}
|
||||
variant="primary"
|
||||
/>
|
||||
|
||||
<MetricCard
|
||||
title="Unique Users"
|
||||
value={metrics.uniqueUsers?.toLocaleString()}
|
||||
icon={<Users className="h-5 w-5" />}
|
||||
value={metrics.uniqueUsers}
|
||||
icon={
|
||||
<svg
|
||||
className="h-5 w-5"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
fill="none"
|
||||
viewBox="0 0 24 24"
|
||||
stroke="currentColor"
|
||||
strokeWidth="1"
|
||||
>
|
||||
<path
|
||||
strokeLinecap="round"
|
||||
strokeLinejoin="round"
|
||||
d="M16 7a4 4 0 11-8 0 4 4 0 018 0zM12 14a7 7 0 00-7 7h14a7 7 0 00-7-7z"
|
||||
/>
|
||||
</svg>
|
||||
}
|
||||
trend={{
|
||||
value: metrics.usersTrend ?? 0,
|
||||
isPositive: (metrics.usersTrend ?? 0) >= 0,
|
||||
}}
|
||||
variant="success"
|
||||
/>
|
||||
|
||||
<MetricCard
|
||||
title="Avg. Session Time"
|
||||
value={`${Math.round(metrics.avgSessionLength || 0)}s`}
|
||||
icon={<Clock className="h-5 w-5" />}
|
||||
icon={
|
||||
<svg
|
||||
className="h-5 w-5"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
fill="none"
|
||||
viewBox="0 0 24 24"
|
||||
stroke="currentColor"
|
||||
strokeWidth="1"
|
||||
>
|
||||
<path
|
||||
strokeLinecap="round"
|
||||
strokeLinejoin="round"
|
||||
d="M12 8v4l3 3m6-3a9 9 0 11-18 0 9 9 0 0118 0z"
|
||||
/>
|
||||
</svg>
|
||||
}
|
||||
trend={{
|
||||
value: metrics.avgSessionTimeTrend ?? 0,
|
||||
isPositive: (metrics.avgSessionTimeTrend ?? 0) >= 0,
|
||||
}}
|
||||
variant="primary"
|
||||
/>
|
||||
|
||||
<MetricCard
|
||||
title="Avg. Response Time"
|
||||
value={`${metrics.avgResponseTime?.toFixed(1) || 0}s`}
|
||||
icon={<Zap className="h-5 w-5" />}
|
||||
icon={
|
||||
<svg
|
||||
className="h-5 w-5"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
fill="none"
|
||||
viewBox="0 0 24 24"
|
||||
stroke="currentColor"
|
||||
strokeWidth="1"
|
||||
>
|
||||
<path
|
||||
strokeLinecap="round"
|
||||
strokeLinejoin="round"
|
||||
d="M13 10V3L4 14h7v7l9-11h-7z"
|
||||
/>
|
||||
</svg>
|
||||
}
|
||||
trend={{
|
||||
value: metrics.avgResponseTimeTrend ?? 0,
|
||||
isPositive: (metrics.avgResponseTimeTrend ?? 0) <= 0,
|
||||
isPositive: (metrics.avgResponseTimeTrend ?? 0) <= 0, // Lower response time is better
|
||||
}}
|
||||
variant="warning"
|
||||
/>
|
||||
|
||||
<MetricCard
|
||||
title="Daily Costs"
|
||||
value={`€${metrics.avgDailyCosts?.toFixed(4) || '0.0000'}`}
|
||||
icon={<Euro className="h-5 w-5" />}
|
||||
description="Average per day"
|
||||
variant="warning"
|
||||
/>
|
||||
|
||||
<MetricCard
|
||||
title="Peak Usage"
|
||||
value={metrics.peakUsageTime || 'N/A'}
|
||||
icon={<TrendingUp className="h-5 w-5" />}
|
||||
description="Busiest hour"
|
||||
variant="primary"
|
||||
/>
|
||||
|
||||
<MetricCard
|
||||
title="Resolution Rate"
|
||||
value={`${metrics.resolvedChatsPercentage?.toFixed(1) || '0.0'}%`}
|
||||
icon={<CheckCircle className="h-5 w-5" />}
|
||||
trend={{
|
||||
value: metrics.resolvedChatsPercentage ?? 0,
|
||||
isPositive: (metrics.resolvedChatsPercentage ?? 0) >= 80,
|
||||
}}
|
||||
variant={metrics.resolvedChatsPercentage && metrics.resolvedChatsPercentage >= 80 ? "success" : "warning"}
|
||||
/>
|
||||
|
||||
<MetricCard
|
||||
title="Active Languages"
|
||||
value={Object.keys(metrics.languages || {}).length}
|
||||
icon={<Globe className="h-5 w-5" />}
|
||||
description="Languages detected"
|
||||
variant="success"
|
||||
/>
|
||||
</div>
|
||||
|
||||
{/* Charts Section */}
|
||||
<div className="grid grid-cols-1 lg:grid-cols-3 gap-6">
|
||||
<ModernLineChart
|
||||
data={getSessionsOverTimeData()}
|
||||
title="Sessions Over Time"
|
||||
className="lg:col-span-2"
|
||||
height={350}
|
||||
/>
|
||||
|
||||
<ModernDonutChart
|
||||
data={getSentimentData()}
|
||||
title="Conversation Sentiment"
|
||||
centerText={{
|
||||
title: "Total",
|
||||
value: metrics.totalSessions || 0,
|
||||
}}
|
||||
height={350}
|
||||
/>
|
||||
</div>
|
||||
|
||||
<div className="grid grid-cols-1 lg:grid-cols-2 gap-6">
|
||||
<ModernBarChart
|
||||
data={getCategoriesData()}
|
||||
title="Sessions by Category"
|
||||
height={350}
|
||||
/>
|
||||
|
||||
<ModernDonutChart
|
||||
data={getLanguagesData()}
|
||||
title="Languages Used"
|
||||
height={350}
|
||||
/>
|
||||
</div>
|
||||
|
||||
{/* Geographic and Topics Section */}
|
||||
<div className="grid grid-cols-1 lg:grid-cols-2 gap-6">
|
||||
<Card>
|
||||
<CardHeader>
|
||||
<CardTitle className="flex items-center gap-2">
|
||||
<Globe className="h-5 w-5" />
|
||||
Geographic Distribution
|
||||
</CardTitle>
|
||||
</CardHeader>
|
||||
<CardContent>
|
||||
<GeographicMap countries={getCountryData()} />
|
||||
</CardContent>
|
||||
</Card>
|
||||
|
||||
<Card>
|
||||
<CardHeader>
|
||||
<CardTitle className="flex items-center gap-2">
|
||||
<MessageCircle className="h-5 w-5" />
|
||||
Common Topics
|
||||
</CardTitle>
|
||||
</CardHeader>
|
||||
<CardContent>
|
||||
<div className="h-[300px]">
|
||||
<WordCloud words={getWordCloudData()} width={500} height={300} />
|
||||
</div>
|
||||
</CardContent>
|
||||
</Card>
|
||||
</div>
|
||||
|
||||
{/* Top Questions Chart */}
|
||||
<TopQuestionsChart data={metrics.topQuestions || []} />
|
||||
|
||||
{/* Response Time Distribution */}
|
||||
<Card>
|
||||
<CardHeader>
|
||||
<CardTitle>Response Time Distribution</CardTitle>
|
||||
</CardHeader>
|
||||
<CardContent>
|
||||
<ResponseTimeDistribution
|
||||
data={getResponseTimeData()}
|
||||
average={metrics.avgResponseTime || 0}
|
||||
<div className="bg-white p-6 rounded-xl shadow lg:col-span-2">
|
||||
<h3 className="font-bold text-lg text-gray-800 mb-4">
|
||||
Sessions Over Time
|
||||
</h3>
|
||||
<SessionsLineChart sessionsPerDay={metrics.days} />
|
||||
</div>
|
||||
<div className="bg-white p-6 rounded-xl shadow">
|
||||
<h3 className="font-bold text-lg text-gray-800 mb-4">
|
||||
Conversation Sentiment
|
||||
</h3>
|
||||
<DonutChart
|
||||
data={{
|
||||
labels: ["Positive", "Neutral", "Negative"],
|
||||
values: [
|
||||
getSentimentData().positive,
|
||||
getSentimentData().neutral,
|
||||
getSentimentData().negative,
|
||||
],
|
||||
colors: ["#1cad7c", "#a1a1a1", "#dc2626"],
|
||||
}}
|
||||
centerText={{
|
||||
title: "Total",
|
||||
value: metrics.totalSessions,
|
||||
}}
|
||||
/>
|
||||
</CardContent>
|
||||
</Card>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Token Usage Summary */}
|
||||
<Card>
|
||||
<CardHeader>
|
||||
<div className="flex flex-col sm:flex-row justify-between items-start sm:items-center gap-4">
|
||||
<CardTitle>AI Usage & Costs</CardTitle>
|
||||
<div className="flex flex-wrap gap-2">
|
||||
<Badge variant="outline" className="gap-1">
|
||||
<span className="font-semibold">Total Tokens:</span>
|
||||
{metrics.totalTokens?.toLocaleString() || 0}
|
||||
</Badge>
|
||||
<Badge variant="outline" className="gap-1">
|
||||
<span className="font-semibold">Total Cost:</span>
|
||||
€{metrics.totalTokensEur?.toFixed(4) || 0}
|
||||
</Badge>
|
||||
<div className="grid grid-cols-1 lg:grid-cols-2 gap-6">
|
||||
<div className="bg-white p-6 rounded-xl shadow">
|
||||
<h3 className="font-bold text-lg text-gray-800 mb-4">
|
||||
Sessions by Category
|
||||
</h3>
|
||||
<CategoriesBarChart categories={metrics.categories || {}} />
|
||||
</div>
|
||||
<div className="bg-white p-6 rounded-xl shadow">
|
||||
<h3 className="font-bold text-lg text-gray-800 mb-4">
|
||||
Languages Used
|
||||
</h3>
|
||||
<LanguagePieChart languages={metrics.languages || {}} />
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="grid grid-cols-1 lg:grid-cols-2 gap-6">
|
||||
<div className="bg-white p-6 rounded-xl shadow">
|
||||
<h3 className="font-bold text-lg text-gray-800 mb-4">
|
||||
Geographic Distribution
|
||||
</h3>
|
||||
<GeographicMap countries={getCountryData()} />
|
||||
</div>
|
||||
|
||||
<div className="bg-white p-6 rounded-xl shadow">
|
||||
<h3 className="font-bold text-lg text-gray-800 mb-4">
|
||||
Common Topics
|
||||
</h3>
|
||||
<div className="h-[300px]">
|
||||
<WordCloud words={getWordCloudData()} width={500} height={400} />
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="bg-white p-6 rounded-xl shadow">
|
||||
<h3 className="font-bold text-lg text-gray-800 mb-4">
|
||||
Response Time Distribution
|
||||
</h3>
|
||||
<ResponseTimeDistribution
|
||||
data={getResponseTimeData()}
|
||||
average={metrics.avgResponseTime || 0}
|
||||
/>
|
||||
</div>
|
||||
<div className="bg-white p-6 rounded-xl shadow">
|
||||
<div className="flex flex-col sm:flex-row justify-between items-start sm:items-center gap-3 mb-4">
|
||||
<h3 className="font-bold text-lg text-gray-800">
|
||||
Token Usage & Costs
|
||||
</h3>
|
||||
<div className="flex flex-col sm:flex-row gap-2 sm:gap-4 w-full sm:w-auto">
|
||||
<div className="text-sm bg-blue-50 text-blue-700 px-3 py-1 rounded-full flex items-center">
|
||||
<span className="font-semibold mr-1">Total Tokens:</span>
|
||||
{metrics.totalTokens?.toLocaleString() || 0}
|
||||
</div>
|
||||
<div className="text-sm bg-green-50 text-green-700 px-3 py-1 rounded-full flex items-center">
|
||||
<span className="font-semibold mr-1">Total Cost:</span>€
|
||||
{metrics.totalTokensEur?.toFixed(4) || 0}
|
||||
</div>
|
||||
</div>
|
||||
</CardHeader>
|
||||
<CardContent>
|
||||
<div className="text-center py-8 text-muted-foreground">
|
||||
<p>Token usage chart will be implemented with historical data</p>
|
||||
</div>
|
||||
</CardContent>
|
||||
</Card>
|
||||
</div>
|
||||
<TokenUsageChart tokenData={getTokenData()} />
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
// Our exported component
|
||||
export default function DashboardPage() {
|
||||
return <DashboardContent />;
|
||||
}
|
||||
|
||||
@ -4,19 +4,6 @@ import { useSession } from "next-auth/react";
|
||||
import { useRouter } from "next/navigation";
|
||||
import { useEffect, useState } from "react";
|
||||
import { FC } from "react";
|
||||
import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card";
|
||||
import { Button } from "@/components/ui/button";
|
||||
import { Badge } from "@/components/ui/badge";
|
||||
import {
|
||||
BarChart3,
|
||||
MessageSquare,
|
||||
Settings,
|
||||
Users,
|
||||
ArrowRight,
|
||||
TrendingUp,
|
||||
Shield,
|
||||
Zap,
|
||||
} from "lucide-react";
|
||||
|
||||
const DashboardPage: FC = () => {
|
||||
const { data: session, status } = useSession();
|
||||
@ -34,223 +21,82 @@ const DashboardPage: FC = () => {
|
||||
|
||||
if (loading) {
|
||||
return (
|
||||
<div className="flex items-center justify-center min-h-[60vh]">
|
||||
<div className="text-center space-y-4">
|
||||
<div className="animate-spin rounded-full h-12 w-12 border-b-2 border-primary mx-auto"></div>
|
||||
<p className="text-lg text-muted-foreground">Loading dashboard...</p>
|
||||
<div className="flex items-center justify-center min-h-[40vh]">
|
||||
<div className="text-center">
|
||||
<div className="animate-spin rounded-full h-12 w-12 border-t-2 border-b-2 border-sky-500 mx-auto mb-4"></div>
|
||||
<p className="text-lg text-gray-600">Loading dashboard...</p>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
const navigationCards = [
|
||||
{
|
||||
title: "Analytics Overview",
|
||||
description: "View comprehensive metrics, charts, and insights from your chat sessions",
|
||||
icon: <BarChart3 className="h-6 w-6" />,
|
||||
href: "/dashboard/overview",
|
||||
variant: "primary" as const,
|
||||
features: ["Real-time metrics", "Interactive charts", "Trend analysis"],
|
||||
},
|
||||
{
|
||||
title: "Session Browser",
|
||||
description: "Browse, search, and analyze individual conversation sessions",
|
||||
icon: <MessageSquare className="h-6 w-6" />,
|
||||
href: "/dashboard/sessions",
|
||||
variant: "success" as const,
|
||||
features: ["Session search", "Conversation details", "Export data"],
|
||||
},
|
||||
...(session?.user?.role === "ADMIN"
|
||||
? [
|
||||
{
|
||||
title: "Company Settings",
|
||||
description: "Configure company settings, integrations, and API connections",
|
||||
icon: <Settings className="h-6 w-6" />,
|
||||
href: "/dashboard/company",
|
||||
variant: "warning" as const,
|
||||
features: ["API configuration", "Integration settings", "Data management"],
|
||||
adminOnly: true,
|
||||
},
|
||||
{
|
||||
title: "User Management",
|
||||
description: "Invite team members and manage user accounts and permissions",
|
||||
icon: <Users className="h-6 w-6" />,
|
||||
href: "/dashboard/users",
|
||||
variant: "default" as const,
|
||||
features: ["User invitations", "Role management", "Access control"],
|
||||
adminOnly: true,
|
||||
},
|
||||
]
|
||||
: []),
|
||||
];
|
||||
|
||||
const getCardClasses = (variant: string) => {
|
||||
switch (variant) {
|
||||
case "primary":
|
||||
return "border-primary/20 bg-linear-to-br from-primary/5 to-primary/10 hover:from-primary/10 hover:to-primary/15";
|
||||
case "success":
|
||||
return "border-green-200 bg-linear-to-br from-green-50 to-green-100 hover:from-green-100 hover:to-green-150 dark:border-green-800 dark:from-green-950 dark:to-green-900";
|
||||
case "warning":
|
||||
return "border-amber-200 bg-linear-to-br from-amber-50 to-amber-100 hover:from-amber-100 hover:to-amber-150 dark:border-amber-800 dark:from-amber-950 dark:to-amber-900";
|
||||
default:
|
||||
return "border-border bg-linear-to-br from-card to-muted/20 hover:from-muted/30 hover:to-muted/40";
|
||||
}
|
||||
};
|
||||
|
||||
const getIconClasses = (variant: string) => {
|
||||
switch (variant) {
|
||||
case "primary":
|
||||
return "bg-primary/10 text-primary border-primary/20";
|
||||
case "success":
|
||||
return "bg-green-100 text-green-600 border-green-200 dark:bg-green-900 dark:text-green-400 dark:border-green-800";
|
||||
case "warning":
|
||||
return "bg-amber-100 text-amber-600 border-amber-200 dark:bg-amber-900 dark:text-amber-400 dark:border-amber-800";
|
||||
default:
|
||||
return "bg-muted text-muted-foreground border-border";
|
||||
}
|
||||
};
|
||||
|
||||
return (
|
||||
<div className="space-y-8">
|
||||
{/* Welcome Header */}
|
||||
<Card className="border-0 bg-linear-to-r from-primary/5 via-primary/10 to-primary/5">
|
||||
<CardHeader>
|
||||
<div className="flex flex-col sm:flex-row justify-between items-start sm:items-center gap-4">
|
||||
<div className="space-y-2">
|
||||
<div className="flex items-center gap-3">
|
||||
<h1 className="text-3xl font-bold tracking-tight">
|
||||
Welcome back, {session?.user?.name || "User"}!
|
||||
</h1>
|
||||
<Badge variant="secondary" className="text-xs">
|
||||
{session?.user?.role}
|
||||
</Badge>
|
||||
</div>
|
||||
<p className="text-muted-foreground">
|
||||
Choose a section below to explore your analytics dashboard
|
||||
<div className="space-y-6">
|
||||
<div className="bg-white rounded-xl shadow p-6">
|
||||
<h1 className="text-2xl font-bold mb-4">Dashboard</h1>
|
||||
|
||||
<div className="grid sm:grid-cols-2 lg:grid-cols-3 gap-6">
|
||||
<div className="bg-gradient-to-br from-sky-50 to-sky-100 p-6 rounded-xl shadow-sm hover:shadow-md transition-shadow">
|
||||
<h2 className="text-lg font-semibold text-sky-700">Analytics</h2>
|
||||
<p className="text-gray-600 mt-2 mb-4">
|
||||
View your chat session metrics and analytics
|
||||
</p>
|
||||
<button
|
||||
onClick={() => router.push("/dashboard/overview")}
|
||||
className="bg-sky-500 hover:bg-sky-600 text-white px-4 py-2 rounded-lg text-sm font-medium transition-colors"
|
||||
>
|
||||
View Analytics
|
||||
</button>
|
||||
</div>
|
||||
|
||||
<div className="bg-gradient-to-br from-emerald-50 to-emerald-100 p-6 rounded-xl shadow-sm hover:shadow-md transition-shadow">
|
||||
<h2 className="text-lg font-semibold text-emerald-700">Sessions</h2>
|
||||
<p className="text-gray-600 mt-2 mb-4">
|
||||
Browse and analyze conversation sessions
|
||||
</p>
|
||||
<button
|
||||
onClick={() => router.push("/dashboard/sessions")}
|
||||
className="bg-emerald-500 hover:bg-emerald-600 text-white px-4 py-2 rounded-lg text-sm font-medium transition-colors"
|
||||
>
|
||||
View Sessions
|
||||
</button>
|
||||
</div>
|
||||
|
||||
{session?.user?.role === "admin" && (
|
||||
<div className="bg-gradient-to-br from-purple-50 to-purple-100 p-6 rounded-xl shadow-sm hover:shadow-md transition-shadow">
|
||||
<h2 className="text-lg font-semibold text-purple-700">
|
||||
Company Settings
|
||||
</h2>
|
||||
<p className="text-gray-600 mt-2 mb-4">
|
||||
Configure company settings and integrations
|
||||
</p>
|
||||
</div>
|
||||
|
||||
<div className="flex items-center gap-2">
|
||||
<div className="flex items-center gap-1 text-sm text-muted-foreground">
|
||||
<Shield className="h-4 w-4" />
|
||||
Secure Dashboard
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</CardHeader>
|
||||
</Card>
|
||||
|
||||
{/* Navigation Cards */}
|
||||
<div className="grid grid-cols-1 lg:grid-cols-2 gap-6">
|
||||
{navigationCards.map((card, index) => (
|
||||
<Card
|
||||
key={index}
|
||||
className={`relative overflow-hidden transition-all duration-200 hover:shadow-lg hover:-translate-y-0.5 cursor-pointer ${getCardClasses(
|
||||
card.variant
|
||||
)}`}
|
||||
onClick={() => router.push(card.href)}
|
||||
>
|
||||
{/* Subtle gradient overlay */}
|
||||
<div className="absolute inset-0 bg-linear-to-br from-white/50 to-transparent dark:from-white/5 pointer-events-none" />
|
||||
|
||||
<CardHeader className="relative">
|
||||
<div className="flex items-start justify-between">
|
||||
<div className="space-y-3">
|
||||
<div className="flex items-center gap-3">
|
||||
<div
|
||||
className={`flex h-12 w-12 shrink-0 items-center justify-center rounded-full border transition-colors ${getIconClasses(
|
||||
card.variant
|
||||
)}`}
|
||||
>
|
||||
{card.icon}
|
||||
</div>
|
||||
<div>
|
||||
<CardTitle className="text-xl font-semibold flex items-center gap-2">
|
||||
{card.title}
|
||||
{card.adminOnly && (
|
||||
<Badge variant="outline" className="text-xs">
|
||||
Admin
|
||||
</Badge>
|
||||
)}
|
||||
</CardTitle>
|
||||
</div>
|
||||
</div>
|
||||
<p className="text-muted-foreground leading-relaxed">
|
||||
{card.description}
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
</CardHeader>
|
||||
|
||||
<CardContent className="relative space-y-4">
|
||||
{/* Features List */}
|
||||
<div className="space-y-2">
|
||||
{card.features.map((feature, featureIndex) => (
|
||||
<div key={featureIndex} className="flex items-center gap-2 text-sm">
|
||||
<div className="h-1.5 w-1.5 rounded-full bg-current opacity-60" />
|
||||
<span className="text-muted-foreground">{feature}</span>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
|
||||
{/* Action Button */}
|
||||
<Button
|
||||
className="w-full gap-2 mt-4"
|
||||
variant={card.variant === "primary" ? "default" : "outline"}
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
router.push(card.href);
|
||||
}}
|
||||
<button
|
||||
onClick={() => router.push("/dashboard/company")}
|
||||
className="bg-purple-500 hover:bg-purple-600 text-white px-4 py-2 rounded-lg text-sm font-medium transition-colors"
|
||||
>
|
||||
<span>
|
||||
{card.title === "Analytics Overview" && "View Analytics"}
|
||||
{card.title === "Session Browser" && "Browse Sessions"}
|
||||
{card.title === "Company Settings" && "Manage Settings"}
|
||||
{card.title === "User Management" && "Manage Users"}
|
||||
</span>
|
||||
<ArrowRight className="h-4 w-4" />
|
||||
</Button>
|
||||
</CardContent>
|
||||
</Card>
|
||||
))}
|
||||
</div>
|
||||
Manage Settings
|
||||
</button>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Quick Stats */}
|
||||
<Card>
|
||||
<CardHeader>
|
||||
<CardTitle className="flex items-center gap-2">
|
||||
<TrendingUp className="h-5 w-5" />
|
||||
Quick Stats
|
||||
</CardTitle>
|
||||
</CardHeader>
|
||||
<CardContent>
|
||||
<div className="grid grid-cols-1 sm:grid-cols-3 gap-6">
|
||||
<div className="text-center space-y-2">
|
||||
<div className="flex items-center justify-center gap-2">
|
||||
<Zap className="h-5 w-5 text-primary" />
|
||||
<span className="text-2xl font-bold">Real-time</span>
|
||||
</div>
|
||||
<p className="text-sm text-muted-foreground">Data updates</p>
|
||||
{session?.user?.role === "admin" && (
|
||||
<div className="bg-gradient-to-br from-amber-50 to-amber-100 p-6 rounded-xl shadow-sm hover:shadow-md transition-shadow">
|
||||
<h2 className="text-lg font-semibold text-amber-700">
|
||||
User Management
|
||||
</h2>
|
||||
<p className="text-gray-600 mt-2 mb-4">
|
||||
Invite and manage user accounts
|
||||
</p>
|
||||
<button
|
||||
onClick={() => router.push("/dashboard/users")}
|
||||
className="bg-amber-500 hover:bg-amber-600 text-white px-4 py-2 rounded-lg text-sm font-medium transition-colors"
|
||||
>
|
||||
Manage Users
|
||||
</button>
|
||||
</div>
|
||||
|
||||
<div className="text-center space-y-2">
|
||||
<div className="flex items-center justify-center gap-2">
|
||||
<Shield className="h-5 w-5 text-green-600" />
|
||||
<span className="text-2xl font-bold">Secure</span>
|
||||
</div>
|
||||
<p className="text-sm text-muted-foreground">Data protection</p>
|
||||
</div>
|
||||
|
||||
<div className="text-center space-y-2">
|
||||
<div className="flex items-center justify-center gap-2">
|
||||
<BarChart3 className="h-5 w-5 text-blue-600" />
|
||||
<span className="text-2xl font-bold">Advanced</span>
|
||||
</div>
|
||||
<p className="text-sm text-muted-foreground">Analytics</p>
|
||||
</div>
|
||||
</div>
|
||||
</CardContent>
|
||||
</Card>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
@ -5,10 +5,13 @@ import { useParams, useRouter } from "next/navigation"; // Import useRouter
|
||||
import { useSession } from "next-auth/react"; // Import useSession
|
||||
import SessionDetails from "../../../../components/SessionDetails";
|
||||
import TranscriptViewer from "../../../../components/TranscriptViewer";
|
||||
import MessageViewer from "../../../../components/MessageViewer";
|
||||
import { ChatSession } from "../../../../lib/types";
|
||||
import Link from "next/link";
|
||||
|
||||
interface SessionApiResponse {
|
||||
session: ChatSession;
|
||||
}
|
||||
|
||||
export default function SessionViewPage() {
|
||||
const params = useParams();
|
||||
const router = useRouter(); // Initialize useRouter
|
||||
@ -31,13 +34,13 @@ export default function SessionViewPage() {
|
||||
try {
|
||||
const response = await fetch(`/api/dashboard/session/${id}`);
|
||||
if (!response.ok) {
|
||||
const errorData = await response.json();
|
||||
const errorData = (await response.json()) as { error: string; };
|
||||
throw new Error(
|
||||
errorData.error ||
|
||||
`Failed to fetch session: ${response.statusText}`
|
||||
);
|
||||
}
|
||||
const data = await response.json();
|
||||
const data = (await response.json()) as SessionApiResponse;
|
||||
setSession(data.session);
|
||||
} catch (err) {
|
||||
setError(
|
||||
@ -108,7 +111,7 @@ export default function SessionViewPage() {
|
||||
}
|
||||
|
||||
return (
|
||||
<div className="min-h-screen bg-linear-to-br from-slate-50 to-sky-100 p-4 md:p-6">
|
||||
<div className="min-h-screen bg-gradient-to-br from-slate-50 to-sky-100 p-4 md:p-6">
|
||||
<div className="max-w-4xl mx-auto">
|
||||
<div className="mb-6">
|
||||
<Link
|
||||
@ -137,26 +140,31 @@ export default function SessionViewPage() {
|
||||
<div>
|
||||
<SessionDetails session={session} />
|
||||
</div>
|
||||
|
||||
{/* Show parsed messages if available */}
|
||||
{session.messages && session.messages.length > 0 && (
|
||||
<div>
|
||||
<MessageViewer messages={session.messages} />
|
||||
{session.transcriptContent &&
|
||||
session.transcriptContent.trim() !== "" ? (
|
||||
<div className="mt-0">
|
||||
<TranscriptViewer
|
||||
transcriptContent={session.transcriptContent}
|
||||
transcriptUrl={session.fullTranscriptUrl}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Show transcript URL if available */}
|
||||
{session.fullTranscriptUrl && (
|
||||
) : (
|
||||
<div className="bg-white p-4 rounded-lg shadow">
|
||||
<h3 className="font-bold text-lg mb-3">Source Transcript</h3>
|
||||
<a
|
||||
href={session.fullTranscriptUrl}
|
||||
target="_blank"
|
||||
rel="noopener noreferrer"
|
||||
className="text-sky-600 hover:underline"
|
||||
>
|
||||
View Original Transcript
|
||||
</a>
|
||||
<h3 className="font-bold text-lg mb-3">Transcript</h3>
|
||||
<p className="text-gray-600">
|
||||
No transcript content available for this session.
|
||||
</p>
|
||||
{session.fullTranscriptUrl &&
|
||||
process.env.NODE_ENV !== "production" && (
|
||||
<a
|
||||
href={session.fullTranscriptUrl}
|
||||
target="_blank"
|
||||
rel="noopener noreferrer"
|
||||
className="text-sky-600 hover:underline mt-2 inline-block"
|
||||
>
|
||||
View Source Transcript URL
|
||||
</a>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
|
||||
@ -14,6 +14,11 @@ interface FilterOptions {
|
||||
languages: string[];
|
||||
}
|
||||
|
||||
interface SessionsApiResponse {
|
||||
sessions: ChatSession[];
|
||||
totalSessions: number;
|
||||
}
|
||||
|
||||
export default function SessionsPage() {
|
||||
const [sessions, setSessions] = useState<ChatSession[]>([]);
|
||||
const [loading, setLoading] = useState(true);
|
||||
@ -58,7 +63,7 @@ export default function SessionsPage() {
|
||||
if (!response.ok) {
|
||||
throw new Error("Failed to fetch filter options");
|
||||
}
|
||||
const data = await response.json();
|
||||
const data = (await response.json()) as FilterOptions;
|
||||
setFilterOptions(data);
|
||||
} catch (err) {
|
||||
setError(
|
||||
@ -88,7 +93,7 @@ export default function SessionsPage() {
|
||||
if (!response.ok) {
|
||||
throw new Error(`Failed to fetch sessions: ${response.statusText}`);
|
||||
}
|
||||
const data = await response.json();
|
||||
const data = (await response.json()) as SessionsApiResponse;
|
||||
setSessions(data.sessions || []);
|
||||
setTotalPages(Math.ceil((data.totalSessions || 0) / pageSize));
|
||||
} catch (err) {
|
||||
|
||||
@ -37,7 +37,7 @@ export default function DashboardSettings({
|
||||
else setMessage("Failed.");
|
||||
}
|
||||
|
||||
if (session.user.role !== "ADMIN") return null;
|
||||
if (session.user.role !== "admin") return null;
|
||||
|
||||
return (
|
||||
<div className="bg-white p-6 rounded-xl shadow mb-6">
|
||||
|
||||
@ -12,6 +12,10 @@ interface UserManagementProps {
|
||||
session: UserSession;
|
||||
}
|
||||
|
||||
interface UsersApiResponse {
|
||||
users: UserItem[];
|
||||
}
|
||||
|
||||
export default function UserManagement({ session }: UserManagementProps) {
|
||||
const [users, setUsers] = useState<UserItem[]>([]);
|
||||
const [email, setEmail] = useState<string>("");
|
||||
@ -21,7 +25,7 @@ export default function UserManagement({ session }: UserManagementProps) {
|
||||
useEffect(() => {
|
||||
fetch("/api/dashboard/users")
|
||||
.then((r) => r.json())
|
||||
.then((data) => setUsers(data.users));
|
||||
.then((data) => setUsers((data as UsersApiResponse).users));
|
||||
}, []);
|
||||
|
||||
async function inviteUser() {
|
||||
@ -34,7 +38,7 @@ export default function UserManagement({ session }: UserManagementProps) {
|
||||
else setMsg("Failed.");
|
||||
}
|
||||
|
||||
if (session.user.role !== "ADMIN") return null;
|
||||
if (session.user.role !== "admin") return null;
|
||||
|
||||
return (
|
||||
<div className="bg-white p-6 rounded-xl shadow mb-6">
|
||||
@ -52,8 +56,8 @@ export default function UserManagement({ session }: UserManagementProps) {
|
||||
onChange={(e) => setRole(e.target.value)}
|
||||
>
|
||||
<option value="user">User</option>
|
||||
<option value="ADMIN">Admin</option>
|
||||
<option value="AUDITOR">Auditor</option>
|
||||
<option value="admin">Admin</option>
|
||||
<option value="auditor">Auditor</option>
|
||||
</select>
|
||||
<button
|
||||
className="bg-blue-600 text-white rounded px-4 py-2 sm:py-0 w-full sm:w-auto"
|
||||
|
||||
@ -9,6 +9,10 @@ interface UserItem {
|
||||
role: string;
|
||||
}
|
||||
|
||||
interface UsersApiResponse {
|
||||
users: UserItem[];
|
||||
}
|
||||
|
||||
export default function UserManagementPage() {
|
||||
const { data: session, status } = useSession();
|
||||
const [users, setUsers] = useState<UserItem[]>([]);
|
||||
@ -27,7 +31,7 @@ export default function UserManagementPage() {
|
||||
setLoading(true);
|
||||
try {
|
||||
const res = await fetch("/api/dashboard/users");
|
||||
const data = await res.json();
|
||||
const data = (await res.json()) as UsersApiResponse;
|
||||
setUsers(data.users);
|
||||
} catch (error) {
|
||||
console.error("Failed to fetch users:", error);
|
||||
@ -52,7 +56,7 @@ export default function UserManagementPage() {
|
||||
// Refresh the user list
|
||||
fetchUsers();
|
||||
} else {
|
||||
const error = await res.json();
|
||||
const error = (await res.json()) as { message?: string; };
|
||||
setMessage(
|
||||
`Failed to invite user: ${error.message || "Unknown error"}`
|
||||
);
|
||||
@ -69,7 +73,7 @@ export default function UserManagementPage() {
|
||||
}
|
||||
|
||||
// Check for admin access
|
||||
if (session?.user?.role !== "ADMIN") {
|
||||
if (session?.user?.role !== "admin") {
|
||||
return (
|
||||
<div className="text-center py-10 bg-white rounded-xl shadow p-6">
|
||||
<h2 className="font-bold text-xl text-red-600 mb-2">Access Denied</h2>
|
||||
@ -124,8 +128,8 @@ export default function UserManagementPage() {
|
||||
onChange={(e) => setRole(e.target.value)}
|
||||
>
|
||||
<option value="user">User</option>
|
||||
<option value="ADMIN">Admin</option>
|
||||
<option value="AUDITOR">Auditor</option>
|
||||
<option value="admin">Admin</option>
|
||||
<option value="auditor">Auditor</option>
|
||||
</select>
|
||||
</div>
|
||||
|
||||
@ -183,9 +187,9 @@ export default function UserManagementPage() {
|
||||
<td className="px-6 py-4 whitespace-nowrap text-sm text-gray-500">
|
||||
<span
|
||||
className={`px-2 inline-flex text-xs leading-5 font-semibold rounded-full ${
|
||||
user.role === "ADMIN"
|
||||
user.role === "admin"
|
||||
? "bg-purple-100 text-purple-800"
|
||||
: user.role === "AUDITOR"
|
||||
: user.role === "auditor"
|
||||
? "bg-blue-100 text-blue-800"
|
||||
: "bg-green-100 text-green-800"
|
||||
}`}
|
||||
|
||||
138
app/globals.css
138
app/globals.css
@ -1,139 +1 @@
|
||||
@import "tailwindcss";
|
||||
@import "tw-animate-css";
|
||||
|
||||
@custom-variant dark (&:is(.dark *));
|
||||
|
||||
@theme inline {
|
||||
--radius-sm: calc(var(--radius) - 4px);
|
||||
--radius-md: calc(var(--radius) - 2px);
|
||||
--radius-lg: var(--radius);
|
||||
--radius-xl: calc(var(--radius) + 4px);
|
||||
--color-background: var(--background);
|
||||
--color-foreground: var(--foreground);
|
||||
--color-card: var(--card);
|
||||
--color-card-foreground: var(--card-foreground);
|
||||
--color-popover: var(--popover);
|
||||
--color-popover-foreground: var(--popover-foreground);
|
||||
--color-primary: var(--primary);
|
||||
--color-primary-foreground: var(--primary-foreground);
|
||||
--color-secondary: var(--secondary);
|
||||
--color-secondary-foreground: var(--secondary-foreground);
|
||||
--color-muted: var(--muted);
|
||||
--color-muted-foreground: var(--muted-foreground);
|
||||
--color-accent: var(--accent);
|
||||
--color-accent-foreground: var(--accent-foreground);
|
||||
--color-destructive: var(--destructive);
|
||||
--color-border: var(--border);
|
||||
--color-input: var(--input);
|
||||
--color-ring: var(--ring);
|
||||
--color-chart-1: var(--chart-1);
|
||||
--color-chart-2: var(--chart-2);
|
||||
--color-chart-3: var(--chart-3);
|
||||
--color-chart-4: var(--chart-4);
|
||||
--color-chart-5: var(--chart-5);
|
||||
--color-sidebar: var(--sidebar);
|
||||
--color-sidebar-foreground: var(--sidebar-foreground);
|
||||
--color-sidebar-primary: var(--sidebar-primary);
|
||||
--color-sidebar-primary-foreground: var(--sidebar-primary-foreground);
|
||||
--color-sidebar-accent: var(--sidebar-accent);
|
||||
--color-sidebar-accent-foreground: var(--sidebar-accent-foreground);
|
||||
--color-sidebar-border: var(--sidebar-border);
|
||||
--color-sidebar-ring: var(--sidebar-ring);
|
||||
}
|
||||
|
||||
:root {
|
||||
--radius: 0.625rem;
|
||||
--background: 255 255 255;
|
||||
--foreground: 15 23 42;
|
||||
--card: 255 255 255;
|
||||
--card-foreground: 15 23 42;
|
||||
--popover: 255 255 255;
|
||||
--popover-foreground: 15 23 42;
|
||||
--primary: 0 123 255;
|
||||
--primary-foreground: 255 255 255;
|
||||
--secondary: 245 245 245;
|
||||
--secondary-foreground: 51 51 51;
|
||||
--muted: 248 250 252;
|
||||
--muted-foreground: 100 116 139;
|
||||
--accent: 245 245 245;
|
||||
--accent-foreground: 51 51 51;
|
||||
--destructive: 239 68 68;
|
||||
--border: 229 231 235;
|
||||
--input: 229 231 235;
|
||||
--ring: 0 123 255;
|
||||
--chart-1: 0 123 255;
|
||||
--chart-2: 255 20 147;
|
||||
--chart-3: 50 205 50;
|
||||
--chart-4: 138 43 226;
|
||||
--chart-5: 255 215 0;
|
||||
--sidebar: 248 250 252;
|
||||
--sidebar-foreground: 15 23 42;
|
||||
--sidebar-primary: 0 123 255;
|
||||
--sidebar-primary-foreground: 255 255 255;
|
||||
--sidebar-accent: 245 245 245;
|
||||
--sidebar-accent-foreground: 51 51 51;
|
||||
--sidebar-border: 229 231 235;
|
||||
--sidebar-ring: 0 123 255;
|
||||
}
|
||||
|
||||
.dark {
|
||||
--background: 15 23 42;
|
||||
--foreground: 248 250 252;
|
||||
--card: 30 41 59;
|
||||
--card-foreground: 248 250 252;
|
||||
--popover: 30 41 59;
|
||||
--popover-foreground: 248 250 252;
|
||||
--primary: 59 130 246;
|
||||
--primary-foreground: 15 23 42;
|
||||
--secondary: 51 65 85;
|
||||
--secondary-foreground: 248 250 252;
|
||||
--muted: 51 65 85;
|
||||
--muted-foreground: 148 163 184;
|
||||
--accent: 51 65 85;
|
||||
--accent-foreground: 248 250 252;
|
||||
--destructive: 248 113 113;
|
||||
--border: 51 65 85;
|
||||
--input: 51 65 85;
|
||||
--ring: 59 130 246;
|
||||
--chart-1: 59 130 246;
|
||||
--chart-2: 236 72 153;
|
||||
--chart-3: 34 197 94;
|
||||
--chart-4: 147 51 234;
|
||||
--chart-5: 251 191 36;
|
||||
--sidebar: 30 41 59;
|
||||
--sidebar-foreground: 248 250 252;
|
||||
--sidebar-primary: 59 130 246;
|
||||
--sidebar-primary-foreground: 248 250 252;
|
||||
--sidebar-accent: 51 65 85;
|
||||
--sidebar-accent-foreground: 248 250 252;
|
||||
--sidebar-border: 51 65 85;
|
||||
--sidebar-ring: 59 130 246;
|
||||
}
|
||||
|
||||
@layer base {
|
||||
* {
|
||||
@apply border-border outline-ring/50;
|
||||
}
|
||||
body {
|
||||
@apply bg-gray-50 text-gray-900;
|
||||
}
|
||||
|
||||
/* Apple-style scrollbars */
|
||||
::-webkit-scrollbar {
|
||||
width: 8px;
|
||||
height: 8px;
|
||||
}
|
||||
|
||||
::-webkit-scrollbar-track {
|
||||
background: transparent;
|
||||
}
|
||||
|
||||
::-webkit-scrollbar-thumb {
|
||||
background: rgba(0, 0, 0, 0.2);
|
||||
border-radius: 4px;
|
||||
}
|
||||
|
||||
::-webkit-scrollbar-thumb:hover {
|
||||
background: rgba(0, 0, 0, 0.3);
|
||||
}
|
||||
}
|
||||
|
||||
@ -1,6 +1,6 @@
|
||||
import { getServerSession } from "next-auth";
|
||||
import { redirect } from "next/navigation";
|
||||
import { authOptions } from "./api/auth/[...nextauth]/route";
|
||||
import { authOptions } from "../pages/api/auth/[...nextauth]";
|
||||
|
||||
export default async function HomePage() {
|
||||
const session = await getServerSession(authOptions);
|
||||
|
||||
@ -7,9 +7,9 @@ export function Providers({ children }: { children: ReactNode }) {
|
||||
// Including error handling and refetch interval for better user experience
|
||||
return (
|
||||
<SessionProvider
|
||||
// Re-fetch session every 30 minutes (reduced from 10)
|
||||
refetchInterval={30 * 60}
|
||||
refetchOnWindowFocus={false}
|
||||
// Re-fetch session every 10 minutes
|
||||
refetchInterval={10 * 60}
|
||||
refetchOnWindowFocus={true}
|
||||
>
|
||||
{children}
|
||||
</SessionProvider>
|
||||
|
||||
@ -7,7 +7,7 @@ export default function RegisterPage() {
|
||||
const [company, setCompany] = useState<string>("");
|
||||
const [password, setPassword] = useState<string>("");
|
||||
const [csvUrl, setCsvUrl] = useState<string>("");
|
||||
const [role, setRole] = useState<string>("ADMIN"); // Default to ADMIN for company registration
|
||||
const [role, setRole] = useState<string>("admin"); // Default to admin for company registration
|
||||
const [error, setError] = useState<string>("");
|
||||
const router = useRouter();
|
||||
|
||||
@ -66,7 +66,7 @@ export default function RegisterPage() {
|
||||
>
|
||||
<option value="admin">Admin</option>
|
||||
<option value="user">User</option>
|
||||
<option value="AUDITOR">Auditor</option>
|
||||
<option value="auditor">Auditor</option>
|
||||
</select>
|
||||
<button className="bg-blue-600 text-white rounded py-2" type="submit">
|
||||
Register & Continue
|
||||
|
||||
@ -1,78 +0,0 @@
|
||||
import { PrismaClient } from '@prisma/client';
|
||||
import { ProcessingStatusManager } from './lib/processingStatusManager';
|
||||
|
||||
const prisma = new PrismaClient();
|
||||
|
||||
async function checkRefactoredPipelineStatus() {
|
||||
try {
|
||||
console.log('=== REFACTORED PIPELINE STATUS ===\n');
|
||||
|
||||
// Get pipeline status using the new system
|
||||
const pipelineStatus = await ProcessingStatusManager.getPipelineStatus();
|
||||
|
||||
console.log(`Total Sessions: ${pipelineStatus.totalSessions}\n`);
|
||||
|
||||
// Display status for each stage
|
||||
const stages = ['CSV_IMPORT', 'TRANSCRIPT_FETCH', 'SESSION_CREATION', 'AI_ANALYSIS', 'QUESTION_EXTRACTION'];
|
||||
|
||||
for (const stage of stages) {
|
||||
console.log(`${stage}:`);
|
||||
const stageData = pipelineStatus.pipeline[stage] || {};
|
||||
|
||||
const pending = stageData.PENDING || 0;
|
||||
const inProgress = stageData.IN_PROGRESS || 0;
|
||||
const completed = stageData.COMPLETED || 0;
|
||||
const failed = stageData.FAILED || 0;
|
||||
const skipped = stageData.SKIPPED || 0;
|
||||
|
||||
console.log(` PENDING: ${pending}`);
|
||||
console.log(` IN_PROGRESS: ${inProgress}`);
|
||||
console.log(` COMPLETED: ${completed}`);
|
||||
console.log(` FAILED: ${failed}`);
|
||||
console.log(` SKIPPED: ${skipped}`);
|
||||
console.log('');
|
||||
}
|
||||
|
||||
// Show what needs processing
|
||||
console.log('=== WHAT NEEDS PROCESSING ===');
|
||||
|
||||
for (const stage of stages) {
|
||||
const stageData = pipelineStatus.pipeline[stage] || {};
|
||||
const pending = stageData.PENDING || 0;
|
||||
const failed = stageData.FAILED || 0;
|
||||
|
||||
if (pending > 0 || failed > 0) {
|
||||
console.log(`• ${stage}: ${pending} pending, ${failed} failed`);
|
||||
}
|
||||
}
|
||||
|
||||
// Show failed sessions if any
|
||||
const failedSessions = await ProcessingStatusManager.getFailedSessions();
|
||||
if (failedSessions.length > 0) {
|
||||
console.log('\n=== FAILED SESSIONS ===');
|
||||
failedSessions.slice(0, 5).forEach(failure => {
|
||||
console.log(` ${failure.session.import?.externalSessionId || failure.sessionId}: ${failure.stage} - ${failure.errorMessage}`);
|
||||
});
|
||||
|
||||
if (failedSessions.length > 5) {
|
||||
console.log(` ... and ${failedSessions.length - 5} more failed sessions`);
|
||||
}
|
||||
}
|
||||
|
||||
// Show sessions ready for AI processing
|
||||
const readyForAI = await ProcessingStatusManager.getSessionsNeedingProcessing('AI_ANALYSIS', 5);
|
||||
if (readyForAI.length > 0) {
|
||||
console.log('\n=== SESSIONS READY FOR AI PROCESSING ===');
|
||||
readyForAI.forEach(status => {
|
||||
console.log(` ${status.session.import?.externalSessionId || status.sessionId} (created: ${status.session.createdAt})`);
|
||||
});
|
||||
}
|
||||
|
||||
} catch (error) {
|
||||
console.error('Error checking pipeline status:', error);
|
||||
} finally {
|
||||
await prisma.$disconnect();
|
||||
}
|
||||
}
|
||||
|
||||
checkRefactoredPipelineStatus();
|
||||
@ -1,21 +0,0 @@
|
||||
{
|
||||
"$schema": "https://ui.shadcn.com/schema.json",
|
||||
"style": "new-york",
|
||||
"rsc": true,
|
||||
"tsx": true,
|
||||
"tailwind": {
|
||||
"config": "",
|
||||
"css": "app/globals.css",
|
||||
"baseColor": "neutral",
|
||||
"cssVariables": true,
|
||||
"prefix": ""
|
||||
},
|
||||
"aliases": {
|
||||
"components": "@/components",
|
||||
"utils": "@/lib/utils",
|
||||
"ui": "@/components/ui",
|
||||
"lib": "@/lib",
|
||||
"hooks": "@/hooks"
|
||||
},
|
||||
"iconLibrary": "lucide"
|
||||
}
|
||||
@ -128,9 +128,9 @@ export function SentimentChart({ sentimentData }: SentimentChartProps) {
|
||||
sentimentData.negative,
|
||||
],
|
||||
backgroundColor: [
|
||||
"rgba(37, 99, 235, 0.8)", // blue (primary)
|
||||
"rgba(107, 114, 128, 0.8)", // gray
|
||||
"rgba(236, 72, 153, 0.8)", // pink
|
||||
"rgba(34, 197, 94, 0.8)", // green
|
||||
"rgba(249, 115, 22, 0.8)", // orange
|
||||
"rgba(239, 68, 68, 0.8)", // red
|
||||
],
|
||||
borderWidth: 1,
|
||||
},
|
||||
@ -196,12 +196,12 @@ export function LanguagePieChart({ languages }: LanguagePieChartProps) {
|
||||
{
|
||||
data,
|
||||
backgroundColor: [
|
||||
"rgba(37, 99, 235, 0.8)", // blue (primary)
|
||||
"rgba(107, 114, 128, 0.8)", // gray
|
||||
"rgba(236, 72, 153, 0.8)", // pink
|
||||
"rgba(34, 197, 94, 0.8)", // lime green
|
||||
"rgba(168, 85, 247, 0.8)", // purple
|
||||
"rgba(251, 191, 36, 0.8)", // yellow
|
||||
"rgba(59, 130, 246, 0.8)",
|
||||
"rgba(16, 185, 129, 0.8)",
|
||||
"rgba(249, 115, 22, 0.8)",
|
||||
"rgba(236, 72, 153, 0.8)",
|
||||
"rgba(139, 92, 246, 0.8)",
|
||||
"rgba(107, 114, 128, 0.8)",
|
||||
],
|
||||
borderWidth: 1,
|
||||
},
|
||||
|
||||
@ -1,172 +0,0 @@
|
||||
"use client";
|
||||
|
||||
import { useState, useEffect, useRef, memo } from "react";
|
||||
|
||||
interface DateRangePickerProps {
|
||||
minDate: string;
|
||||
maxDate: string;
|
||||
onDateRangeChange: (startDate: string, endDate: string) => void;
|
||||
initialStartDate?: string;
|
||||
initialEndDate?: string;
|
||||
}
|
||||
|
||||
function DateRangePicker({
|
||||
minDate,
|
||||
maxDate,
|
||||
onDateRangeChange,
|
||||
initialStartDate,
|
||||
initialEndDate,
|
||||
}: DateRangePickerProps) {
|
||||
const [startDate, setStartDate] = useState(initialStartDate || minDate);
|
||||
const [endDate, setEndDate] = useState(initialEndDate || maxDate);
|
||||
const isInitializedRef = useRef(false);
|
||||
|
||||
useEffect(() => {
|
||||
// Update local state when props change (e.g., when date range is loaded from API)
|
||||
if (initialStartDate && initialStartDate !== startDate) {
|
||||
setStartDate(initialStartDate);
|
||||
}
|
||||
if (initialEndDate && initialEndDate !== endDate) {
|
||||
setEndDate(initialEndDate);
|
||||
}
|
||||
}, [initialStartDate, initialEndDate]);
|
||||
|
||||
useEffect(() => {
|
||||
// Only notify parent component after initial render and when dates actually change
|
||||
// This prevents the infinite loop by not including onDateRangeChange in dependencies
|
||||
if (isInitializedRef.current) {
|
||||
onDateRangeChange(startDate, endDate);
|
||||
} else {
|
||||
isInitializedRef.current = true;
|
||||
}
|
||||
}, [startDate, endDate]);
|
||||
|
||||
const handleStartDateChange = (newStartDate: string) => {
|
||||
// Ensure start date is not before min date
|
||||
if (newStartDate < minDate) {
|
||||
setStartDate(minDate);
|
||||
return;
|
||||
}
|
||||
|
||||
// Ensure start date is not after end date
|
||||
if (newStartDate > endDate) {
|
||||
setEndDate(newStartDate);
|
||||
}
|
||||
|
||||
setStartDate(newStartDate);
|
||||
};
|
||||
|
||||
const handleEndDateChange = (newEndDate: string) => {
|
||||
// Ensure end date is not after max date
|
||||
if (newEndDate > maxDate) {
|
||||
setEndDate(maxDate);
|
||||
return;
|
||||
}
|
||||
|
||||
// Ensure end date is not before start date
|
||||
if (newEndDate < startDate) {
|
||||
setStartDate(newEndDate);
|
||||
}
|
||||
|
||||
setEndDate(newEndDate);
|
||||
};
|
||||
|
||||
const resetToFullRange = () => {
|
||||
setStartDate(minDate);
|
||||
setEndDate(maxDate);
|
||||
};
|
||||
|
||||
const setLast30Days = () => {
|
||||
const thirtyDaysAgo = new Date();
|
||||
thirtyDaysAgo.setDate(thirtyDaysAgo.getDate() - 30);
|
||||
const thirtyDaysAgoStr = thirtyDaysAgo.toISOString().split('T')[0];
|
||||
|
||||
// Use the later of 30 days ago or minDate
|
||||
const newStartDate = thirtyDaysAgoStr > minDate ? thirtyDaysAgoStr : minDate;
|
||||
setStartDate(newStartDate);
|
||||
setEndDate(maxDate);
|
||||
};
|
||||
|
||||
const setLast7Days = () => {
|
||||
const sevenDaysAgo = new Date();
|
||||
sevenDaysAgo.setDate(sevenDaysAgo.getDate() - 7);
|
||||
const sevenDaysAgoStr = sevenDaysAgo.toISOString().split('T')[0];
|
||||
|
||||
// Use the later of 7 days ago or minDate
|
||||
const newStartDate = sevenDaysAgoStr > minDate ? sevenDaysAgoStr : minDate;
|
||||
setStartDate(newStartDate);
|
||||
setEndDate(maxDate);
|
||||
};
|
||||
|
||||
return (
|
||||
<div className="bg-white p-4 rounded-lg shadow-sm border border-gray-200">
|
||||
<div className="flex flex-col sm:flex-row gap-4 items-start sm:items-center">
|
||||
<div className="flex flex-col sm:flex-row gap-3 items-start sm:items-center">
|
||||
<label className="text-sm font-medium text-gray-700 whitespace-nowrap">
|
||||
Date Range:
|
||||
</label>
|
||||
|
||||
<div className="flex flex-col sm:flex-row gap-2 items-start sm:items-center">
|
||||
<div className="flex items-center gap-2">
|
||||
<label htmlFor="start-date" className="text-sm text-gray-600">
|
||||
From:
|
||||
</label>
|
||||
<input
|
||||
id="start-date"
|
||||
type="date"
|
||||
value={startDate}
|
||||
min={minDate}
|
||||
max={maxDate}
|
||||
onChange={(e) => handleStartDateChange(e.target.value)}
|
||||
className="px-3 py-1.5 border border-gray-300 rounded-md text-sm focus:outline-none focus:ring-2 focus:ring-sky-500 focus:border-sky-500"
|
||||
/>
|
||||
</div>
|
||||
|
||||
<div className="flex items-center gap-2">
|
||||
<label htmlFor="end-date" className="text-sm text-gray-600">
|
||||
To:
|
||||
</label>
|
||||
<input
|
||||
id="end-date"
|
||||
type="date"
|
||||
value={endDate}
|
||||
min={minDate}
|
||||
max={maxDate}
|
||||
onChange={(e) => handleEndDateChange(e.target.value)}
|
||||
className="px-3 py-1.5 border border-gray-300 rounded-md text-sm focus:outline-none focus:ring-2 focus:ring-sky-500 focus:border-sky-500"
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="flex flex-wrap gap-2">
|
||||
<button
|
||||
onClick={setLast7Days}
|
||||
className="px-3 py-1.5 text-xs font-medium text-sky-600 bg-sky-50 border border-sky-200 rounded-md hover:bg-sky-100 transition-colors"
|
||||
>
|
||||
Last 7 days
|
||||
</button>
|
||||
<button
|
||||
onClick={setLast30Days}
|
||||
className="px-3 py-1.5 text-xs font-medium text-sky-600 bg-sky-50 border border-sky-200 rounded-md hover:bg-sky-100 transition-colors"
|
||||
>
|
||||
Last 30 days
|
||||
</button>
|
||||
<button
|
||||
onClick={resetToFullRange}
|
||||
className="px-3 py-1.5 text-xs font-medium text-gray-600 bg-gray-50 border border-gray-200 rounded-md hover:bg-gray-100 transition-colors"
|
||||
>
|
||||
All time
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="mt-2 text-xs text-gray-500">
|
||||
Available data: {new Date(minDate).toLocaleDateString()} - {new Date(maxDate).toLocaleDateString()}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
// Export memoized component as default
|
||||
export default memo(DateRangePicker);
|
||||
@ -25,30 +25,6 @@ const getCountryCoordinates = (): Record<string, [number, number]> => {
|
||||
US: [37.0902, -95.7129],
|
||||
GB: [55.3781, -3.436],
|
||||
BA: [43.9159, 17.6791],
|
||||
NL: [52.1326, 5.2913],
|
||||
DE: [51.1657, 10.4515],
|
||||
FR: [46.6034, 1.8883],
|
||||
IT: [41.8719, 12.5674],
|
||||
ES: [40.4637, -3.7492],
|
||||
CA: [56.1304, -106.3468],
|
||||
PL: [51.9194, 19.1451],
|
||||
SE: [60.1282, 18.6435],
|
||||
NO: [60.472, 8.4689],
|
||||
FI: [61.9241, 25.7482],
|
||||
CH: [46.8182, 8.2275],
|
||||
AT: [47.5162, 14.5501],
|
||||
BE: [50.8503, 4.3517],
|
||||
DK: [56.2639, 9.5018],
|
||||
CZ: [49.8175, 15.473],
|
||||
HU: [47.1625, 19.5033],
|
||||
PT: [39.3999, -8.2245],
|
||||
GR: [39.0742, 21.8243],
|
||||
RO: [45.9432, 24.9668],
|
||||
IE: [53.4129, -8.2439],
|
||||
BG: [42.7339, 25.4858],
|
||||
HR: [45.1, 15.2],
|
||||
SK: [48.669, 19.699],
|
||||
SI: [46.1512, 14.9955]
|
||||
};
|
||||
// This function now primarily returns fallbacks.
|
||||
// The actual fetching using @rapideditor/country-coder will be in the component's useEffect.
|
||||
|
||||
@ -1,79 +0,0 @@
|
||||
"use client";
|
||||
|
||||
import { Message } from "../lib/types";
|
||||
|
||||
interface MessageViewerProps {
|
||||
messages: Message[];
|
||||
}
|
||||
|
||||
/**
|
||||
* Component to display parsed messages in a chat-like format
|
||||
*/
|
||||
export default function MessageViewer({ messages }: MessageViewerProps) {
|
||||
if (!messages || messages.length === 0) {
|
||||
return (
|
||||
<div className="bg-white p-4 rounded-lg shadow">
|
||||
<h3 className="font-bold text-lg mb-3">Conversation</h3>
|
||||
<p className="text-gray-500 italic">No parsed messages available</p>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<div className="bg-white p-4 rounded-lg shadow">
|
||||
<h3 className="font-bold text-lg mb-3">
|
||||
Conversation ({messages.length} messages)
|
||||
</h3>
|
||||
|
||||
<div className="space-y-3 max-h-96 overflow-y-auto">
|
||||
{messages.map((message) => (
|
||||
<div
|
||||
key={message.id}
|
||||
className={`flex ${
|
||||
message.role.toLowerCase() === "user"
|
||||
? "justify-end"
|
||||
: "justify-start"
|
||||
}`}
|
||||
>
|
||||
<div
|
||||
className={`max-w-xs lg:max-w-md px-4 py-2 rounded-lg ${
|
||||
message.role.toLowerCase() === "user"
|
||||
? "bg-blue-500 text-white"
|
||||
: message.role.toLowerCase() === "assistant"
|
||||
? "bg-gray-200 text-gray-800"
|
||||
: "bg-yellow-100 text-yellow-800"
|
||||
}`}
|
||||
>
|
||||
<div className="flex items-center justify-between mb-1">
|
||||
<span className="text-xs font-medium opacity-75 mr-2">
|
||||
{message.role}
|
||||
</span>
|
||||
<span className="text-xs opacity-75 ml-2">
|
||||
{message.timestamp ? new Date(message.timestamp).toLocaleTimeString() : 'No timestamp'}
|
||||
</span>
|
||||
</div>
|
||||
<div className="text-sm whitespace-pre-wrap">
|
||||
{message.content}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
|
||||
<div className="mt-4 pt-3 border-t text-sm text-gray-500">
|
||||
<div className="flex justify-between">
|
||||
<span>
|
||||
First message: {messages[0].timestamp ? new Date(messages[0].timestamp).toLocaleString() : 'No timestamp'}
|
||||
</span>
|
||||
<span>
|
||||
Last message:{" "}
|
||||
{(() => {
|
||||
const lastMessage = messages[messages.length - 1];
|
||||
return lastMessage.timestamp ? new Date(lastMessage.timestamp).toLocaleString() : 'No timestamp';
|
||||
})()}
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
@ -1,15 +1,10 @@
|
||||
"use client";
|
||||
|
||||
import {
|
||||
BarChart,
|
||||
Bar,
|
||||
XAxis,
|
||||
YAxis,
|
||||
CartesianGrid,
|
||||
Tooltip,
|
||||
ResponsiveContainer,
|
||||
ReferenceLine,
|
||||
} from "recharts";
|
||||
import { useRef, useEffect } from "react";
|
||||
import Chart from "chart.js/auto";
|
||||
import annotationPlugin from "chartjs-plugin-annotation";
|
||||
|
||||
Chart.register(annotationPlugin);
|
||||
|
||||
interface ResponseTimeDistributionProps {
|
||||
data: number[];
|
||||
@ -17,145 +12,114 @@ interface ResponseTimeDistributionProps {
|
||||
targetResponseTime?: number;
|
||||
}
|
||||
|
||||
const CustomTooltip = ({ active, payload, label }: any) => {
|
||||
if (active && payload && payload.length) {
|
||||
return (
|
||||
<div className="rounded-lg border border-gray-200 bg-white p-3 shadow-md">
|
||||
<p className="text-sm font-medium text-gray-900">{label}</p>
|
||||
<p className="text-sm text-gray-600">
|
||||
<span className="font-medium text-gray-900">
|
||||
{payload[0].value}
|
||||
</span>{" "}
|
||||
responses
|
||||
</p>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
return null;
|
||||
};
|
||||
|
||||
export default function ResponseTimeDistribution({
|
||||
data,
|
||||
average,
|
||||
targetResponseTime,
|
||||
}: ResponseTimeDistributionProps) {
|
||||
if (!data || !data.length) {
|
||||
return (
|
||||
<div className="flex items-center justify-center h-64 text-muted-foreground">
|
||||
No response time data available
|
||||
</div>
|
||||
);
|
||||
}
|
||||
const ref = useRef<HTMLCanvasElement | null>(null);
|
||||
|
||||
// Create bins for the histogram (0-1s, 1-2s, 2-3s, etc.)
|
||||
const maxTime = Math.ceil(Math.max(...data));
|
||||
const bins = Array(Math.min(maxTime + 1, 10)).fill(0);
|
||||
useEffect(() => {
|
||||
if (!ref.current || !data || !data.length) return;
|
||||
|
||||
// Count responses in each bin
|
||||
data.forEach((time) => {
|
||||
const binIndex = Math.min(Math.floor(time), bins.length - 1);
|
||||
bins[binIndex]++;
|
||||
});
|
||||
const ctx = ref.current.getContext("2d");
|
||||
if (!ctx) return;
|
||||
|
||||
// Create chart data
|
||||
const chartData = bins.map((count, i) => {
|
||||
let label;
|
||||
if (i === bins.length - 1 && bins.length < maxTime + 1) {
|
||||
label = `${i}+ sec`;
|
||||
} else {
|
||||
label = `${i}-${i + 1} sec`;
|
||||
}
|
||||
// Create bins for the histogram (0-1s, 1-2s, 2-3s, etc.)
|
||||
const maxTime = Math.ceil(Math.max(...data));
|
||||
const bins = Array(Math.min(maxTime + 1, 10)).fill(0);
|
||||
|
||||
// Determine color based on response time using cohesive palette
|
||||
let color;
|
||||
if (i <= 2) color = "rgb(37, 99, 235)"; // Blue for fast (primary color)
|
||||
else if (i <= 5) color = "rgb(107, 114, 128)"; // Gray for medium
|
||||
else color = "rgb(236, 72, 153)"; // Pink for slow
|
||||
// Count responses in each bin
|
||||
data.forEach((time) => {
|
||||
const binIndex = Math.min(Math.floor(time), bins.length - 1);
|
||||
bins[binIndex]++;
|
||||
});
|
||||
|
||||
return {
|
||||
name: label,
|
||||
value: count,
|
||||
color,
|
||||
};
|
||||
});
|
||||
// Create labels for each bin
|
||||
const labels = bins.map((_, i) => {
|
||||
if (i === bins.length - 1 && bins.length < maxTime + 1) {
|
||||
return `${i}+ seconds`;
|
||||
}
|
||||
return `${i}-${i + 1} seconds`;
|
||||
});
|
||||
|
||||
return (
|
||||
<div className="h-64">
|
||||
<ResponsiveContainer width="100%" height="100%">
|
||||
<BarChart data={chartData} margin={{ top: 20, right: 30, left: 20, bottom: 5 }}>
|
||||
<CartesianGrid
|
||||
strokeDasharray="3 3"
|
||||
stroke="rgb(229, 231, 235)"
|
||||
strokeOpacity={0.5}
|
||||
/>
|
||||
<XAxis
|
||||
dataKey="name"
|
||||
stroke="rgb(100, 116, 139)"
|
||||
fontSize={12}
|
||||
tickLine={false}
|
||||
axisLine={false}
|
||||
/>
|
||||
<YAxis
|
||||
stroke="rgb(100, 116, 139)"
|
||||
fontSize={12}
|
||||
tickLine={false}
|
||||
axisLine={false}
|
||||
label={{
|
||||
value: 'Number of Responses',
|
||||
angle: -90,
|
||||
position: 'insideLeft',
|
||||
style: { textAnchor: 'middle', fill: 'rgb(100, 116, 139)' }
|
||||
}}
|
||||
/>
|
||||
<Tooltip content={<CustomTooltip />} />
|
||||
|
||||
<Bar
|
||||
dataKey="value"
|
||||
radius={[4, 4, 0, 0]}
|
||||
fill="hsl(var(--chart-1))"
|
||||
>
|
||||
{chartData.map((entry, index) => (
|
||||
<Bar key={`cell-${index}`} fill={entry.color} />
|
||||
))}
|
||||
</Bar>
|
||||
const chart = new Chart(ctx, {
|
||||
type: "bar",
|
||||
data: {
|
||||
labels,
|
||||
datasets: [
|
||||
{
|
||||
label: "Responses",
|
||||
data: bins,
|
||||
backgroundColor: bins.map((_, i) => {
|
||||
// Green for fast, yellow for medium, red for slow
|
||||
if (i <= 2) return "rgba(34, 197, 94, 0.7)"; // Green
|
||||
if (i <= 5) return "rgba(250, 204, 21, 0.7)"; // Yellow
|
||||
return "rgba(239, 68, 68, 0.7)"; // Red
|
||||
}),
|
||||
borderWidth: 1,
|
||||
},
|
||||
],
|
||||
},
|
||||
options: {
|
||||
responsive: true,
|
||||
plugins: {
|
||||
legend: { display: false },
|
||||
annotation: {
|
||||
annotations: {
|
||||
averageLine: {
|
||||
type: "line",
|
||||
yMin: 0,
|
||||
yMax: Math.max(...bins),
|
||||
xMin: average,
|
||||
xMax: average,
|
||||
borderColor: "rgba(75, 192, 192, 1)",
|
||||
borderWidth: 2,
|
||||
label: {
|
||||
display: true,
|
||||
content: "Avg: " + average.toFixed(1) + "s",
|
||||
position: "start",
|
||||
},
|
||||
},
|
||||
targetLine: targetResponseTime
|
||||
? {
|
||||
type: "line",
|
||||
yMin: 0,
|
||||
yMax: Math.max(...bins),
|
||||
xMin: targetResponseTime,
|
||||
xMax: targetResponseTime,
|
||||
borderColor: "rgba(75, 192, 192, 0.7)",
|
||||
borderWidth: 2,
|
||||
label: {
|
||||
display: true,
|
||||
content: "Target",
|
||||
position: "end",
|
||||
},
|
||||
}
|
||||
: undefined,
|
||||
},
|
||||
},
|
||||
},
|
||||
scales: {
|
||||
y: {
|
||||
beginAtZero: true,
|
||||
title: {
|
||||
display: true,
|
||||
text: "Number of Responses",
|
||||
},
|
||||
},
|
||||
x: {
|
||||
title: {
|
||||
display: true,
|
||||
text: "Response Time",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
{/* Average line */}
|
||||
<ReferenceLine
|
||||
x={Math.floor(average)}
|
||||
stroke="rgb(0, 123, 255)"
|
||||
strokeWidth={2}
|
||||
strokeDasharray="5 5"
|
||||
label={{
|
||||
value: `Avg: ${average.toFixed(1)}s`,
|
||||
position: "top" as const,
|
||||
style: {
|
||||
fill: "rgb(0, 123, 255)",
|
||||
fontSize: "12px",
|
||||
fontWeight: "500"
|
||||
}
|
||||
}}
|
||||
/>
|
||||
return () => chart.destroy();
|
||||
}, [data, average, targetResponseTime]);
|
||||
|
||||
{/* Target line (if provided) */}
|
||||
{targetResponseTime && (
|
||||
<ReferenceLine
|
||||
x={Math.floor(targetResponseTime)}
|
||||
stroke="rgb(255, 20, 147)"
|
||||
strokeWidth={2}
|
||||
strokeDasharray="3 3"
|
||||
label={{
|
||||
value: `Target: ${targetResponseTime}s`,
|
||||
position: "top" as const,
|
||||
style: {
|
||||
fill: "rgb(255, 20, 147)",
|
||||
fontSize: "12px",
|
||||
fontWeight: "500"
|
||||
}
|
||||
}}
|
||||
/>
|
||||
)}
|
||||
</BarChart>
|
||||
</ResponsiveContainer>
|
||||
</div>
|
||||
);
|
||||
return <canvas ref={ref} height={180} />;
|
||||
}
|
||||
|
||||
@ -15,10 +15,11 @@ export default function SessionDetails({ session }: SessionDetailsProps) {
|
||||
return (
|
||||
<div className="bg-white p-4 rounded-lg shadow">
|
||||
<h3 className="font-bold text-lg mb-3">Session Details</h3>
|
||||
<div className="space-y-3">
|
||||
|
||||
<div className="space-y-2">
|
||||
<div className="flex justify-between border-b pb-2">
|
||||
<span className="text-gray-600">Session ID:</span>
|
||||
<span className="font-medium font-mono text-sm">{session.id}</span>
|
||||
<span className="font-medium">{session.sessionId || session.id}</span>
|
||||
</div>
|
||||
|
||||
<div className="flex justify-between border-b pb-2">
|
||||
@ -72,15 +73,20 @@ export default function SessionDetails({ session }: SessionDetailsProps) {
|
||||
<div className="flex justify-between border-b pb-2">
|
||||
<span className="text-gray-600">Sentiment:</span>
|
||||
<span
|
||||
className={`font-medium capitalize ${
|
||||
session.sentiment === "POSITIVE"
|
||||
className={`font-medium ${
|
||||
session.sentiment > 0.3
|
||||
? "text-green-500"
|
||||
: session.sentiment === "NEGATIVE"
|
||||
: session.sentiment < -0.3
|
||||
? "text-red-500"
|
||||
: "text-orange-500"
|
||||
}`}
|
||||
>
|
||||
{session.sentiment.toLowerCase()}
|
||||
{session.sentiment > 0.3
|
||||
? "Positive"
|
||||
: session.sentiment < -0.3
|
||||
? "Negative"
|
||||
: "Neutral"}{" "}
|
||||
({session.sentiment.toFixed(2)})
|
||||
</span>
|
||||
</div>
|
||||
)}
|
||||
@ -90,6 +96,19 @@ export default function SessionDetails({ session }: SessionDetailsProps) {
|
||||
<span className="font-medium">{session.messagesSent || 0}</span>
|
||||
</div>
|
||||
|
||||
{typeof session.tokens === "number" && (
|
||||
<div className="flex justify-between border-b pb-2">
|
||||
<span className="text-gray-600">Tokens:</span>
|
||||
<span className="font-medium">{session.tokens}</span>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{typeof session.tokensEur === "number" && (
|
||||
<div className="flex justify-between border-b pb-2">
|
||||
<span className="text-gray-600">Cost:</span>
|
||||
<span className="font-medium">€{session.tokensEur.toFixed(4)}</span>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{session.avgResponseTime !== null &&
|
||||
session.avgResponseTime !== undefined && (
|
||||
@ -123,48 +142,24 @@ export default function SessionDetails({ session }: SessionDetailsProps) {
|
||||
</div>
|
||||
)}
|
||||
|
||||
{session.ipAddress && (
|
||||
<div className="flex justify-between border-b pb-2">
|
||||
<span className="text-gray-600">IP Address:</span>
|
||||
<span className="font-medium font-mono text-sm">
|
||||
{session.ipAddress}
|
||||
</span>
|
||||
</div>
|
||||
)}
|
||||
|
||||
|
||||
{session.initialMsg && (
|
||||
<div className="border-b pb-2">
|
||||
<span className="text-gray-600 block mb-1">Initial Message:</span>
|
||||
<div className="bg-gray-50 p-2 rounded text-sm italic">
|
||||
"{session.initialMsg}"
|
||||
{/* Transcript rendering is now handled by the parent page (app/dashboard/sessions/[id]/page.tsx) */}
|
||||
{/* Fallback to link only if we only have the URL but no content - this might also be redundant if parent handles all transcript display */}
|
||||
{(!session.transcriptContent ||
|
||||
session.transcriptContent.length === 0) &&
|
||||
session.fullTranscriptUrl &&
|
||||
process.env.NODE_ENV !== "production" && (
|
||||
<div className="flex justify-between pt-2">
|
||||
<span className="text-gray-600">Transcript:</span>
|
||||
<a
|
||||
href={session.fullTranscriptUrl}
|
||||
target="_blank"
|
||||
rel="noopener noreferrer"
|
||||
className="text-blue-500 hover:text-blue-700 underline"
|
||||
>
|
||||
View Full Transcript
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{session.summary && (
|
||||
<div className="border-b pb-2">
|
||||
<span className="text-gray-600 block mb-1">AI Summary:</span>
|
||||
<div className="bg-blue-50 p-2 rounded text-sm">
|
||||
{session.summary}
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
|
||||
{session.fullTranscriptUrl && (
|
||||
<div className="flex justify-between pt-2">
|
||||
<span className="text-gray-600">Transcript:</span>
|
||||
<a
|
||||
href={session.fullTranscriptUrl}
|
||||
target="_blank"
|
||||
rel="noopener noreferrer"
|
||||
className="text-blue-500 hover:text-blue-700 underline"
|
||||
>
|
||||
View Full Transcript
|
||||
</a>
|
||||
</div>
|
||||
)}
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
|
||||
@ -167,7 +167,7 @@ const NavItem: React.FC<NavItemProps> = ({
|
||||
}
|
||||
}}
|
||||
>
|
||||
<span className={`shrink-0 ${isExpanded ? "mr-3" : "mx-auto"}`}>
|
||||
<span className={`flex-shrink-0 ${isExpanded ? "mr-3" : "mx-auto"}`}>
|
||||
{icon}
|
||||
</span>
|
||||
{isExpanded ? (
|
||||
@ -334,7 +334,7 @@ export default function Sidebar({
|
||||
isExpanded ? "" : "justify-center"
|
||||
}`}
|
||||
>
|
||||
<span className={`shrink-0 ${isExpanded ? "mr-3" : ""}`}>
|
||||
<span className={`flex-shrink-0 ${isExpanded ? "mr-3" : ""}`}>
|
||||
<LogoutIcon />
|
||||
</span>
|
||||
{isExpanded ? (
|
||||
|
||||
@ -1,76 +0,0 @@
|
||||
'use client';
|
||||
|
||||
import React from 'react';
|
||||
import { TopQuestion } from '../lib/types';
|
||||
|
||||
interface TopQuestionsChartProps {
|
||||
data: TopQuestion[];
|
||||
title?: string;
|
||||
}
|
||||
|
||||
export default function TopQuestionsChart({ data, title = "Top 5 Asked Questions" }: TopQuestionsChartProps) {
|
||||
if (!data || data.length === 0) {
|
||||
return (
|
||||
<div className="bg-white p-6 rounded-2xl shadow-sm border border-gray-100">
|
||||
<h3 className="text-lg font-semibold text-gray-900 mb-6">{title}</h3>
|
||||
<div className="text-center py-12 text-gray-500">
|
||||
No questions data available
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
// Find the maximum count to calculate relative bar widths
|
||||
const maxCount = Math.max(...data.map(q => q.count));
|
||||
|
||||
return (
|
||||
<div className="bg-white p-6 rounded-2xl shadow-sm border border-gray-100">
|
||||
<h3 className="text-lg font-semibold text-gray-900 mb-6">{title}</h3>
|
||||
|
||||
<div className="space-y-6">
|
||||
{data.map((question, index) => {
|
||||
const percentage = maxCount > 0 ? (question.count / maxCount) * 100 : 0;
|
||||
|
||||
return (
|
||||
<div key={index} className="group">
|
||||
{/* Rank and Question */}
|
||||
<div className="flex items-start gap-4 mb-3">
|
||||
<div className="flex-shrink-0 w-8 h-8 bg-gray-100 text-gray-900 text-sm font-semibold rounded-full flex items-center justify-center">
|
||||
{index + 1}
|
||||
</div>
|
||||
<div className="flex-1 min-w-0">
|
||||
<p className="text-sm font-medium text-gray-900 leading-relaxed mb-2">
|
||||
{question.question}
|
||||
</p>
|
||||
<div className="flex items-center justify-between">
|
||||
<div className="flex-1 mr-4">
|
||||
<div className="w-full bg-gray-100 rounded-full h-2">
|
||||
<div
|
||||
className="bg-blue-600 h-2 rounded-full transition-all duration-500 ease-out"
|
||||
style={{ width: `${percentage}%` }}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
<span className="text-sm font-semibold text-gray-900 min-w-0">
|
||||
{question.count} times
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
})}
|
||||
</div>
|
||||
|
||||
{/* Summary */}
|
||||
<div className="mt-8 pt-6 border-t border-gray-100">
|
||||
<div className="flex justify-between items-center">
|
||||
<span className="text-sm text-gray-600">Total questions analyzed</span>
|
||||
<span className="text-sm font-semibold text-gray-900">
|
||||
{data.reduce((sum, q) => sum + q.count, 0)}
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
@ -2,7 +2,7 @@
|
||||
|
||||
import { useState } from "react";
|
||||
import ReactMarkdown from "react-markdown";
|
||||
import rehypeRaw from "rehype-raw"; // Import rehype-raw
|
||||
import rehypeRaw from "rehype-raw";
|
||||
|
||||
interface TranscriptViewerProps {
|
||||
transcriptContent: string;
|
||||
@ -23,6 +23,7 @@ function formatTranscript(content: string): React.ReactNode[] {
|
||||
const elements: React.ReactNode[] = [];
|
||||
let currentSpeaker: string | null = null;
|
||||
let currentMessages: string[] = [];
|
||||
let currentTimestamp: string | null = null;
|
||||
|
||||
// Process each line
|
||||
lines.forEach((line) => {
|
||||
@ -32,8 +33,15 @@ function formatTranscript(content: string): React.ReactNode[] {
|
||||
return;
|
||||
}
|
||||
|
||||
// Check if this is a new speaker line
|
||||
if (line.startsWith("User:") || line.startsWith("Assistant:")) {
|
||||
// Check if this is a new speaker line with or without datetime
|
||||
// Format 1: [29.05.2025 21:26:44] User: message
|
||||
// Format 2: User: message
|
||||
const datetimeMatch = line.match(
|
||||
/^\[([^\]]+)\]\s*(User|Assistant):\s*(.*)$/
|
||||
);
|
||||
const simpleMatch = line.match(/^(User|Assistant):\s*(.*)$/);
|
||||
|
||||
if (datetimeMatch || simpleMatch) {
|
||||
// If we have accumulated messages for a previous speaker, add them
|
||||
if (currentSpeaker && currentMessages.length > 0) {
|
||||
elements.push(
|
||||
@ -48,6 +56,11 @@ function formatTranscript(content: string): React.ReactNode[] {
|
||||
: "bg-gray-100 text-gray-800"
|
||||
}`}
|
||||
>
|
||||
{currentTimestamp && (
|
||||
<div className="text-xs opacity-60 mb-1">
|
||||
{currentTimestamp}
|
||||
</div>
|
||||
)}
|
||||
{currentMessages.map((msg, i) => (
|
||||
// Use ReactMarkdown to render each message part
|
||||
<ReactMarkdown
|
||||
@ -73,12 +86,22 @@ function formatTranscript(content: string): React.ReactNode[] {
|
||||
currentMessages = [];
|
||||
}
|
||||
|
||||
// Set the new current speaker
|
||||
currentSpeaker = line.startsWith("User:") ? "User" : "Assistant";
|
||||
// Add the content after "User:" or "Assistant:"
|
||||
const messageContent = line.substring(line.indexOf(":") + 1).trim();
|
||||
if (messageContent) {
|
||||
currentMessages.push(messageContent);
|
||||
if (datetimeMatch) {
|
||||
// Format with datetime: [29.05.2025 21:26:44] User: message
|
||||
currentTimestamp = datetimeMatch[1];
|
||||
currentSpeaker = datetimeMatch[2];
|
||||
const messageContent = datetimeMatch[3].trim();
|
||||
if (messageContent) {
|
||||
currentMessages.push(messageContent);
|
||||
}
|
||||
} else if (simpleMatch) {
|
||||
// Format without datetime: User: message
|
||||
currentTimestamp = null;
|
||||
currentSpeaker = simpleMatch[1];
|
||||
const messageContent = simpleMatch[2].trim();
|
||||
if (messageContent) {
|
||||
currentMessages.push(messageContent);
|
||||
}
|
||||
}
|
||||
} else if (currentSpeaker) {
|
||||
// This is a continuation of the current speaker's message
|
||||
@ -100,6 +123,9 @@ function formatTranscript(content: string): React.ReactNode[] {
|
||||
: "bg-gray-100 text-gray-800"
|
||||
}`}
|
||||
>
|
||||
{currentTimestamp && (
|
||||
<div className="text-xs opacity-60 mb-1">{currentTimestamp}</div>
|
||||
)}
|
||||
{currentMessages.map((msg, i) => (
|
||||
// Use ReactMarkdown to render each message part
|
||||
<ReactMarkdown
|
||||
@ -138,6 +164,9 @@ export default function TranscriptViewer({
|
||||
|
||||
const formattedElements = formatTranscript(transcriptContent);
|
||||
|
||||
// Hide "View Full Raw" button in production environment
|
||||
const isProduction = process.env.NODE_ENV === "production";
|
||||
|
||||
return (
|
||||
<div className="bg-white shadow-lg rounded-lg p-4 md:p-6 mt-6">
|
||||
<div className="flex justify-between items-center mb-4">
|
||||
@ -145,7 +174,7 @@ export default function TranscriptViewer({
|
||||
Session Transcript
|
||||
</h2>
|
||||
<div className="flex items-center space-x-3">
|
||||
{transcriptUrl && (
|
||||
{transcriptUrl && !isProduction && (
|
||||
<a
|
||||
href={transcriptUrl}
|
||||
target="_blank"
|
||||
|
||||
@ -22,7 +22,7 @@ export default function WelcomeBanner({ companyName }: WelcomeBannerProps) {
|
||||
}
|
||||
|
||||
return (
|
||||
<div className="bg-linear-to-r from-blue-600 to-indigo-700 text-white p-6 rounded-xl shadow-lg mb-8">
|
||||
<div className="bg-gradient-to-r from-blue-600 to-indigo-700 text-white p-6 rounded-xl shadow-lg mb-8">
|
||||
<div className="flex justify-between items-center">
|
||||
<div>
|
||||
<h1 className="text-3xl font-bold">
|
||||
|
||||
@ -1,105 +0,0 @@
|
||||
"use client";
|
||||
|
||||
import {
|
||||
BarChart,
|
||||
Bar,
|
||||
XAxis,
|
||||
YAxis,
|
||||
CartesianGrid,
|
||||
Tooltip,
|
||||
ResponsiveContainer,
|
||||
Cell,
|
||||
} from "recharts";
|
||||
import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card";
|
||||
|
||||
interface BarChartProps {
|
||||
data: Array<{ name: string; value: number; [key: string]: any }>;
|
||||
title?: string;
|
||||
dataKey?: string;
|
||||
colors?: string[];
|
||||
height?: number;
|
||||
className?: string;
|
||||
}
|
||||
|
||||
const CustomTooltip = ({ active, payload, label }: any) => {
|
||||
if (active && payload && payload.length) {
|
||||
return (
|
||||
<div className="rounded-lg border bg-background p-3 shadow-md">
|
||||
<p className="text-sm font-medium">{label}</p>
|
||||
<p className="text-sm text-muted-foreground">
|
||||
<span className="font-medium text-foreground">
|
||||
{payload[0].value}
|
||||
</span>{" "}
|
||||
sessions
|
||||
</p>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
return null;
|
||||
};
|
||||
|
||||
export default function ModernBarChart({
|
||||
data,
|
||||
title,
|
||||
dataKey = "value",
|
||||
colors = [
|
||||
"rgb(37, 99, 235)", // Blue (primary)
|
||||
"rgb(107, 114, 128)", // Gray
|
||||
"rgb(236, 72, 153)", // Pink
|
||||
"rgb(34, 197, 94)", // Lime green
|
||||
"rgb(168, 85, 247)", // Purple
|
||||
],
|
||||
height = 300,
|
||||
className,
|
||||
}: BarChartProps) {
|
||||
return (
|
||||
<Card className={className}>
|
||||
{title && (
|
||||
<CardHeader>
|
||||
<CardTitle className="text-lg font-semibold">{title}</CardTitle>
|
||||
</CardHeader>
|
||||
)}
|
||||
<CardContent>
|
||||
<ResponsiveContainer width="100%" height={height}>
|
||||
<BarChart data={data} margin={{ top: 5, right: 30, left: 20, bottom: 5 }}>
|
||||
<CartesianGrid
|
||||
strokeDasharray="3 3"
|
||||
stroke="rgb(229, 231, 235)"
|
||||
strokeOpacity={0.5}
|
||||
/>
|
||||
<XAxis
|
||||
dataKey="name"
|
||||
stroke="rgb(100, 116, 139)"
|
||||
fontSize={12}
|
||||
tickLine={false}
|
||||
axisLine={false}
|
||||
angle={-45}
|
||||
textAnchor="end"
|
||||
height={80}
|
||||
/>
|
||||
<YAxis
|
||||
stroke="rgb(100, 116, 139)"
|
||||
fontSize={12}
|
||||
tickLine={false}
|
||||
axisLine={false}
|
||||
/>
|
||||
<Tooltip content={<CustomTooltip />} />
|
||||
<Bar
|
||||
dataKey={dataKey}
|
||||
radius={[4, 4, 0, 0]}
|
||||
className="transition-all duration-200"
|
||||
>
|
||||
{data.map((entry, index) => (
|
||||
<Cell
|
||||
key={`cell-${index}`}
|
||||
fill={colors[index % colors.length]}
|
||||
className="hover:opacity-80"
|
||||
/>
|
||||
))}
|
||||
</Bar>
|
||||
</BarChart>
|
||||
</ResponsiveContainer>
|
||||
</CardContent>
|
||||
</Card>
|
||||
);
|
||||
}
|
||||
@ -1,122 +0,0 @@
|
||||
"use client";
|
||||
|
||||
import { PieChart, Pie, Cell, ResponsiveContainer, Tooltip, Legend } from "recharts";
|
||||
import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card";
|
||||
|
||||
interface DonutChartProps {
|
||||
data: Array<{ name: string; value: number; color?: string }>;
|
||||
title?: string;
|
||||
centerText?: {
|
||||
title: string;
|
||||
value: string | number;
|
||||
};
|
||||
colors?: string[];
|
||||
height?: number;
|
||||
className?: string;
|
||||
}
|
||||
|
||||
const CustomTooltip = ({ active, payload }: any) => {
|
||||
if (active && payload && payload.length) {
|
||||
const data = payload[0];
|
||||
return (
|
||||
<div className="rounded-lg border bg-background p-3 shadow-md">
|
||||
<p className="text-sm font-medium">{data.name}</p>
|
||||
<p className="text-sm text-muted-foreground">
|
||||
<span className="font-medium text-foreground">
|
||||
{data.value}
|
||||
</span>{" "}
|
||||
sessions ({((data.value / data.payload.total) * 100).toFixed(1)}%)
|
||||
</p>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
return null;
|
||||
};
|
||||
|
||||
const CustomLegend = ({ payload }: any) => {
|
||||
return (
|
||||
<div className="flex flex-wrap justify-center gap-4 mt-4">
|
||||
{payload.map((entry: any, index: number) => (
|
||||
<div key={index} className="flex items-center gap-2">
|
||||
<div
|
||||
className="w-3 h-3 rounded-full"
|
||||
style={{ backgroundColor: entry.color }}
|
||||
/>
|
||||
<span className="text-sm text-muted-foreground">{entry.value}</span>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
const CenterLabel = ({ centerText, total }: any) => {
|
||||
if (!centerText) return null;
|
||||
|
||||
return (
|
||||
<div className="absolute inset-0 flex items-center justify-center pointer-events-none">
|
||||
<div className="text-center">
|
||||
<p className="text-2xl font-bold">{centerText.value}</p>
|
||||
<p className="text-sm text-muted-foreground">{centerText.title}</p>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
export default function ModernDonutChart({
|
||||
data,
|
||||
title,
|
||||
centerText,
|
||||
colors = [
|
||||
"rgb(37, 99, 235)", // Blue (primary)
|
||||
"rgb(107, 114, 128)", // Gray
|
||||
"rgb(236, 72, 153)", // Pink
|
||||
"rgb(34, 197, 94)", // Lime green
|
||||
"rgb(168, 85, 247)", // Purple
|
||||
],
|
||||
height = 300,
|
||||
className,
|
||||
}: DonutChartProps) {
|
||||
const total = data.reduce((sum, item) => sum + item.value, 0);
|
||||
const dataWithTotal = data.map(item => ({ ...item, total }));
|
||||
|
||||
return (
|
||||
<Card className={className}>
|
||||
{title && (
|
||||
<CardHeader>
|
||||
<CardTitle className="text-lg font-semibold">{title}</CardTitle>
|
||||
</CardHeader>
|
||||
)}
|
||||
<CardContent>
|
||||
<div className="relative">
|
||||
<ResponsiveContainer width="100%" height={height}>
|
||||
<PieChart>
|
||||
<Pie
|
||||
data={dataWithTotal}
|
||||
cx="50%"
|
||||
cy="50%"
|
||||
innerRadius={60}
|
||||
outerRadius={100}
|
||||
paddingAngle={2}
|
||||
dataKey="value"
|
||||
className="transition-all duration-200"
|
||||
>
|
||||
{dataWithTotal.map((entry, index) => (
|
||||
<Cell
|
||||
key={`cell-${index}`}
|
||||
fill={entry.color || colors[index % colors.length]}
|
||||
className="hover:opacity-80 cursor-pointer"
|
||||
stroke="white"
|
||||
strokeWidth={2}
|
||||
/>
|
||||
))}
|
||||
</Pie>
|
||||
<Tooltip content={<CustomTooltip />} />
|
||||
<Legend content={<CustomLegend />} />
|
||||
</PieChart>
|
||||
</ResponsiveContainer>
|
||||
<CenterLabel centerText={centerText} total={total} />
|
||||
</div>
|
||||
</CardContent>
|
||||
</Card>
|
||||
);
|
||||
}
|
||||
@ -1,117 +0,0 @@
|
||||
"use client";
|
||||
|
||||
import {
|
||||
LineChart,
|
||||
Line,
|
||||
XAxis,
|
||||
YAxis,
|
||||
CartesianGrid,
|
||||
Tooltip,
|
||||
ResponsiveContainer,
|
||||
Area,
|
||||
AreaChart,
|
||||
} from "recharts";
|
||||
import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card";
|
||||
|
||||
interface LineChartProps {
|
||||
data: Array<{ date: string; value: number; [key: string]: any }>;
|
||||
title?: string;
|
||||
dataKey?: string;
|
||||
color?: string;
|
||||
gradient?: boolean;
|
||||
height?: number;
|
||||
className?: string;
|
||||
}
|
||||
|
||||
const CustomTooltip = ({ active, payload, label }: any) => {
|
||||
if (active && payload && payload.length) {
|
||||
return (
|
||||
<div className="rounded-lg border bg-background p-3 shadow-md">
|
||||
<p className="text-sm font-medium">{label}</p>
|
||||
<p className="text-sm text-muted-foreground">
|
||||
<span className="font-medium text-foreground">
|
||||
{payload[0].value}
|
||||
</span>{" "}
|
||||
sessions
|
||||
</p>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
return null;
|
||||
};
|
||||
|
||||
export default function ModernLineChart({
|
||||
data,
|
||||
title,
|
||||
dataKey = "value",
|
||||
color = "rgb(37, 99, 235)",
|
||||
gradient = true,
|
||||
height = 300,
|
||||
className,
|
||||
}: LineChartProps) {
|
||||
const ChartComponent = gradient ? AreaChart : LineChart;
|
||||
|
||||
return (
|
||||
<Card className={className}>
|
||||
{title && (
|
||||
<CardHeader>
|
||||
<CardTitle className="text-lg font-semibold">{title}</CardTitle>
|
||||
</CardHeader>
|
||||
)}
|
||||
<CardContent>
|
||||
<ResponsiveContainer width="100%" height={height}>
|
||||
<ChartComponent data={data} margin={{ top: 5, right: 30, left: 20, bottom: 5 }}>
|
||||
<defs>
|
||||
{gradient && (
|
||||
<linearGradient id="colorGradient" x1="0" y1="0" x2="0" y2="1">
|
||||
<stop offset="5%" stopColor={color} stopOpacity={0.3} />
|
||||
<stop offset="95%" stopColor={color} stopOpacity={0.05} />
|
||||
</linearGradient>
|
||||
)}
|
||||
</defs>
|
||||
<CartesianGrid
|
||||
strokeDasharray="3 3"
|
||||
stroke="rgb(229, 231, 235)"
|
||||
strokeOpacity={0.5}
|
||||
/>
|
||||
<XAxis
|
||||
dataKey="date"
|
||||
stroke="rgb(100, 116, 139)"
|
||||
fontSize={12}
|
||||
tickLine={false}
|
||||
axisLine={false}
|
||||
/>
|
||||
<YAxis
|
||||
stroke="rgb(100, 116, 139)"
|
||||
fontSize={12}
|
||||
tickLine={false}
|
||||
axisLine={false}
|
||||
/>
|
||||
<Tooltip content={<CustomTooltip />} />
|
||||
|
||||
{gradient ? (
|
||||
<Area
|
||||
type="monotone"
|
||||
dataKey={dataKey}
|
||||
stroke={color}
|
||||
strokeWidth={2}
|
||||
fill="url(#colorGradient)"
|
||||
dot={{ fill: color, strokeWidth: 2, r: 4 }}
|
||||
activeDot={{ r: 6, stroke: color, strokeWidth: 2 }}
|
||||
/>
|
||||
) : (
|
||||
<Line
|
||||
type="monotone"
|
||||
dataKey={dataKey}
|
||||
stroke={color}
|
||||
strokeWidth={2}
|
||||
dot={{ fill: color, strokeWidth: 2, r: 4 }}
|
||||
activeDot={{ r: 6, stroke: color, strokeWidth: 2 }}
|
||||
/>
|
||||
)}
|
||||
</ChartComponent>
|
||||
</ResponsiveContainer>
|
||||
</CardContent>
|
||||
</Card>
|
||||
);
|
||||
}
|
||||
@ -1,46 +0,0 @@
|
||||
import * as React from "react"
|
||||
import { Slot } from "@radix-ui/react-slot"
|
||||
import { cva, type VariantProps } from "class-variance-authority"
|
||||
|
||||
import { cn } from "@/lib/utils"
|
||||
|
||||
const badgeVariants = cva(
|
||||
"inline-flex items-center justify-center rounded-md border px-2 py-0.5 text-xs font-medium w-fit whitespace-nowrap shrink-0 [&>svg]:size-3 gap-1 [&>svg]:pointer-events-none focus-visible:border-ring focus-visible:ring-ring/50 focus-visible:ring-[3px] aria-invalid:ring-destructive/20 dark:aria-invalid:ring-destructive/40 aria-invalid:border-destructive transition-[color,box-shadow] overflow-hidden",
|
||||
{
|
||||
variants: {
|
||||
variant: {
|
||||
default:
|
||||
"border-transparent bg-primary text-primary-foreground [a&]:hover:bg-primary/90",
|
||||
secondary:
|
||||
"border-transparent bg-secondary text-secondary-foreground [a&]:hover:bg-secondary/90",
|
||||
destructive:
|
||||
"border-transparent bg-destructive text-white [a&]:hover:bg-destructive/90 focus-visible:ring-destructive/20 dark:focus-visible:ring-destructive/40 dark:bg-destructive/60",
|
||||
outline:
|
||||
"text-foreground [a&]:hover:bg-accent [a&]:hover:text-accent-foreground",
|
||||
},
|
||||
},
|
||||
defaultVariants: {
|
||||
variant: "default",
|
||||
},
|
||||
}
|
||||
)
|
||||
|
||||
function Badge({
|
||||
className,
|
||||
variant,
|
||||
asChild = false,
|
||||
...props
|
||||
}: React.ComponentProps<"span"> &
|
||||
VariantProps<typeof badgeVariants> & { asChild?: boolean }) {
|
||||
const Comp = asChild ? Slot : "span"
|
||||
|
||||
return (
|
||||
<Comp
|
||||
data-slot="badge"
|
||||
className={cn(badgeVariants({ variant }), className)}
|
||||
{...props}
|
||||
/>
|
||||
)
|
||||
}
|
||||
|
||||
export { Badge, badgeVariants }
|
||||
@ -1,59 +0,0 @@
|
||||
import * as React from "react"
|
||||
import { Slot } from "@radix-ui/react-slot"
|
||||
import { cva, type VariantProps } from "class-variance-authority"
|
||||
|
||||
import { cn } from "@/lib/utils"
|
||||
|
||||
const buttonVariants = cva(
|
||||
"inline-flex items-center justify-center gap-2 whitespace-nowrap rounded-md text-sm font-medium transition-all disabled:pointer-events-none disabled:opacity-50 [&_svg]:pointer-events-none [&_svg:not([class*='size-'])]:size-4 shrink-0 [&_svg]:shrink-0 outline-none focus-visible:border-ring focus-visible:ring-ring/50 focus-visible:ring-[3px] aria-invalid:ring-destructive/20 dark:aria-invalid:ring-destructive/40 aria-invalid:border-destructive",
|
||||
{
|
||||
variants: {
|
||||
variant: {
|
||||
default:
|
||||
"bg-primary text-primary-foreground shadow-xs hover:bg-primary/90",
|
||||
destructive:
|
||||
"bg-destructive text-white shadow-xs hover:bg-destructive/90 focus-visible:ring-destructive/20 dark:focus-visible:ring-destructive/40 dark:bg-destructive/60",
|
||||
outline:
|
||||
"border bg-background shadow-xs hover:bg-accent hover:text-accent-foreground dark:bg-input/30 dark:border-input dark:hover:bg-input/50",
|
||||
secondary:
|
||||
"bg-secondary text-secondary-foreground shadow-xs hover:bg-secondary/80",
|
||||
ghost:
|
||||
"hover:bg-accent hover:text-accent-foreground dark:hover:bg-accent/50",
|
||||
link: "text-primary underline-offset-4 hover:underline",
|
||||
},
|
||||
size: {
|
||||
default: "h-9 px-4 py-2 has-[>svg]:px-3",
|
||||
sm: "h-8 rounded-md gap-1.5 px-3 has-[>svg]:px-2.5",
|
||||
lg: "h-10 rounded-md px-6 has-[>svg]:px-4",
|
||||
icon: "size-9",
|
||||
},
|
||||
},
|
||||
defaultVariants: {
|
||||
variant: "default",
|
||||
size: "default",
|
||||
},
|
||||
}
|
||||
)
|
||||
|
||||
function Button({
|
||||
className,
|
||||
variant,
|
||||
size,
|
||||
asChild = false,
|
||||
...props
|
||||
}: React.ComponentProps<"button"> &
|
||||
VariantProps<typeof buttonVariants> & {
|
||||
asChild?: boolean
|
||||
}) {
|
||||
const Comp = asChild ? Slot : "button"
|
||||
|
||||
return (
|
||||
<Comp
|
||||
data-slot="button"
|
||||
className={cn(buttonVariants({ variant, size, className }))}
|
||||
{...props}
|
||||
/>
|
||||
)
|
||||
}
|
||||
|
||||
export { Button, buttonVariants }
|
||||
@ -1,92 +0,0 @@
|
||||
import * as React from "react"
|
||||
|
||||
import { cn } from "@/lib/utils"
|
||||
|
||||
function Card({ className, ...props }: React.ComponentProps<"div">) {
|
||||
return (
|
||||
<div
|
||||
data-slot="card"
|
||||
className={cn(
|
||||
"bg-white text-gray-900 flex flex-col gap-6 rounded-2xl border border-gray-100 py-6 shadow-sm",
|
||||
className
|
||||
)}
|
||||
{...props}
|
||||
/>
|
||||
)
|
||||
}
|
||||
|
||||
function CardHeader({ className, ...props }: React.ComponentProps<"div">) {
|
||||
return (
|
||||
<div
|
||||
data-slot="card-header"
|
||||
className={cn(
|
||||
"@container/card-header grid auto-rows-min grid-rows-[auto_auto] items-start gap-1.5 px-6 has-data-[slot=card-action]:grid-cols-[1fr_auto] [.border-b]:pb-6",
|
||||
className
|
||||
)}
|
||||
{...props}
|
||||
/>
|
||||
)
|
||||
}
|
||||
|
||||
function CardTitle({ className, ...props }: React.ComponentProps<"div">) {
|
||||
return (
|
||||
<div
|
||||
data-slot="card-title"
|
||||
className={cn("leading-none font-semibold", className)}
|
||||
{...props}
|
||||
/>
|
||||
)
|
||||
}
|
||||
|
||||
function CardDescription({ className, ...props }: React.ComponentProps<"div">) {
|
||||
return (
|
||||
<div
|
||||
data-slot="card-description"
|
||||
className={cn("text-muted-foreground text-sm", className)}
|
||||
{...props}
|
||||
/>
|
||||
)
|
||||
}
|
||||
|
||||
function CardAction({ className, ...props }: React.ComponentProps<"div">) {
|
||||
return (
|
||||
<div
|
||||
data-slot="card-action"
|
||||
className={cn(
|
||||
"col-start-2 row-span-2 row-start-1 self-start justify-self-end",
|
||||
className
|
||||
)}
|
||||
{...props}
|
||||
/>
|
||||
)
|
||||
}
|
||||
|
||||
function CardContent({ className, ...props }: React.ComponentProps<"div">) {
|
||||
return (
|
||||
<div
|
||||
data-slot="card-content"
|
||||
className={cn("px-6", className)}
|
||||
{...props}
|
||||
/>
|
||||
)
|
||||
}
|
||||
|
||||
function CardFooter({ className, ...props }: React.ComponentProps<"div">) {
|
||||
return (
|
||||
<div
|
||||
data-slot="card-footer"
|
||||
className={cn("flex items-center px-6 [.border-t]:pt-6", className)}
|
||||
{...props}
|
||||
/>
|
||||
)
|
||||
}
|
||||
|
||||
export {
|
||||
Card,
|
||||
CardHeader,
|
||||
CardFooter,
|
||||
CardTitle,
|
||||
CardAction,
|
||||
CardDescription,
|
||||
CardContent,
|
||||
}
|
||||
@ -1,257 +0,0 @@
|
||||
"use client"
|
||||
|
||||
import * as React from "react"
|
||||
import * as DropdownMenuPrimitive from "@radix-ui/react-dropdown-menu"
|
||||
import { CheckIcon, ChevronRightIcon, CircleIcon } from "lucide-react"
|
||||
|
||||
import { cn } from "@/lib/utils"
|
||||
|
||||
function DropdownMenu({
|
||||
...props
|
||||
}: React.ComponentProps<typeof DropdownMenuPrimitive.Root>) {
|
||||
return <DropdownMenuPrimitive.Root data-slot="dropdown-menu" {...props} />
|
||||
}
|
||||
|
||||
function DropdownMenuPortal({
|
||||
...props
|
||||
}: React.ComponentProps<typeof DropdownMenuPrimitive.Portal>) {
|
||||
return (
|
||||
<DropdownMenuPrimitive.Portal data-slot="dropdown-menu-portal" {...props} />
|
||||
)
|
||||
}
|
||||
|
||||
function DropdownMenuTrigger({
|
||||
...props
|
||||
}: React.ComponentProps<typeof DropdownMenuPrimitive.Trigger>) {
|
||||
return (
|
||||
<DropdownMenuPrimitive.Trigger
|
||||
data-slot="dropdown-menu-trigger"
|
||||
{...props}
|
||||
/>
|
||||
)
|
||||
}
|
||||
|
||||
function DropdownMenuContent({
|
||||
className,
|
||||
sideOffset = 4,
|
||||
...props
|
||||
}: React.ComponentProps<typeof DropdownMenuPrimitive.Content>) {
|
||||
return (
|
||||
<DropdownMenuPrimitive.Portal>
|
||||
<DropdownMenuPrimitive.Content
|
||||
data-slot="dropdown-menu-content"
|
||||
sideOffset={sideOffset}
|
||||
className={cn(
|
||||
"bg-popover text-popover-foreground data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 data-[state=closed]:zoom-out-95 data-[state=open]:zoom-in-95 data-[side=bottom]:slide-in-from-top-2 data-[side=left]:slide-in-from-right-2 data-[side=right]:slide-in-from-left-2 data-[side=top]:slide-in-from-bottom-2 z-50 max-h-(--radix-dropdown-menu-content-available-height) min-w-32 origin-(--radix-dropdown-menu-content-transform-origin) overflow-x-hidden overflow-y-auto rounded-md border p-1 shadow-md",
|
||||
className
|
||||
)}
|
||||
{...props}
|
||||
/>
|
||||
</DropdownMenuPrimitive.Portal>
|
||||
)
|
||||
}
|
||||
|
||||
function DropdownMenuGroup({
|
||||
...props
|
||||
}: React.ComponentProps<typeof DropdownMenuPrimitive.Group>) {
|
||||
return (
|
||||
<DropdownMenuPrimitive.Group data-slot="dropdown-menu-group" {...props} />
|
||||
)
|
||||
}
|
||||
|
||||
function DropdownMenuItem({
|
||||
className,
|
||||
inset,
|
||||
variant = "default",
|
||||
...props
|
||||
}: React.ComponentProps<typeof DropdownMenuPrimitive.Item> & {
|
||||
inset?: boolean
|
||||
variant?: "default" | "destructive"
|
||||
}) {
|
||||
return (
|
||||
<DropdownMenuPrimitive.Item
|
||||
data-slot="dropdown-menu-item"
|
||||
data-inset={inset}
|
||||
data-variant={variant}
|
||||
className={cn(
|
||||
"focus:bg-accent focus:text-accent-foreground data-[variant=destructive]:text-destructive data-[variant=destructive]:focus:bg-destructive/10 dark:data-[variant=destructive]:focus:bg-destructive/20 data-[variant=destructive]:focus:text-destructive data-[variant=destructive]:*:[svg]:text-destructive! [&_svg:not([class*='text-'])]:text-muted-foreground relative flex cursor-default items-center gap-2 rounded-sm px-2 py-1.5 text-sm outline-hidden select-none data-disabled:pointer-events-none data-disabled:opacity-50 data-inset:pl-8 [&_svg]:pointer-events-none [&_svg]:shrink-0 [&_svg:not([class*='size-'])]:size-4",
|
||||
className
|
||||
)}
|
||||
{...props}
|
||||
/>
|
||||
)
|
||||
}
|
||||
|
||||
function DropdownMenuCheckboxItem({
|
||||
className,
|
||||
children,
|
||||
checked,
|
||||
...props
|
||||
}: React.ComponentProps<typeof DropdownMenuPrimitive.CheckboxItem>) {
|
||||
return (
|
||||
<DropdownMenuPrimitive.CheckboxItem
|
||||
data-slot="dropdown-menu-checkbox-item"
|
||||
className={cn(
|
||||
"focus:bg-accent focus:text-accent-foreground relative flex cursor-default items-center gap-2 rounded-sm py-1.5 pr-2 pl-8 text-sm outline-hidden select-none data-disabled:pointer-events-none data-disabled:opacity-50 [&_svg]:pointer-events-none [&_svg]:shrink-0 [&_svg:not([class*='size-'])]:size-4",
|
||||
className
|
||||
)}
|
||||
checked={checked}
|
||||
{...props}
|
||||
>
|
||||
<span className="pointer-events-none absolute left-2 flex size-3.5 items-center justify-center">
|
||||
<DropdownMenuPrimitive.ItemIndicator>
|
||||
<CheckIcon className="size-4" />
|
||||
</DropdownMenuPrimitive.ItemIndicator>
|
||||
</span>
|
||||
{children}
|
||||
</DropdownMenuPrimitive.CheckboxItem>
|
||||
)
|
||||
}
|
||||
|
||||
function DropdownMenuRadioGroup({
|
||||
...props
|
||||
}: React.ComponentProps<typeof DropdownMenuPrimitive.RadioGroup>) {
|
||||
return (
|
||||
<DropdownMenuPrimitive.RadioGroup
|
||||
data-slot="dropdown-menu-radio-group"
|
||||
{...props}
|
||||
/>
|
||||
)
|
||||
}
|
||||
|
||||
function DropdownMenuRadioItem({
|
||||
className,
|
||||
children,
|
||||
...props
|
||||
}: React.ComponentProps<typeof DropdownMenuPrimitive.RadioItem>) {
|
||||
return (
|
||||
<DropdownMenuPrimitive.RadioItem
|
||||
data-slot="dropdown-menu-radio-item"
|
||||
className={cn(
|
||||
"focus:bg-accent focus:text-accent-foreground relative flex cursor-default items-center gap-2 rounded-sm py-1.5 pr-2 pl-8 text-sm outline-hidden select-none data-disabled:pointer-events-none data-disabled:opacity-50 [&_svg]:pointer-events-none [&_svg]:shrink-0 [&_svg:not([class*='size-'])]:size-4",
|
||||
className
|
||||
)}
|
||||
{...props}
|
||||
>
|
||||
<span className="pointer-events-none absolute left-2 flex size-3.5 items-center justify-center">
|
||||
<DropdownMenuPrimitive.ItemIndicator>
|
||||
<CircleIcon className="size-2 fill-current" />
|
||||
</DropdownMenuPrimitive.ItemIndicator>
|
||||
</span>
|
||||
{children}
|
||||
</DropdownMenuPrimitive.RadioItem>
|
||||
)
|
||||
}
|
||||
|
||||
function DropdownMenuLabel({
|
||||
className,
|
||||
inset,
|
||||
...props
|
||||
}: React.ComponentProps<typeof DropdownMenuPrimitive.Label> & {
|
||||
inset?: boolean
|
||||
}) {
|
||||
return (
|
||||
<DropdownMenuPrimitive.Label
|
||||
data-slot="dropdown-menu-label"
|
||||
data-inset={inset}
|
||||
className={cn(
|
||||
"px-2 py-1.5 text-sm font-medium data-inset:pl-8",
|
||||
className
|
||||
)}
|
||||
{...props}
|
||||
/>
|
||||
)
|
||||
}
|
||||
|
||||
function DropdownMenuSeparator({
|
||||
className,
|
||||
...props
|
||||
}: React.ComponentProps<typeof DropdownMenuPrimitive.Separator>) {
|
||||
return (
|
||||
<DropdownMenuPrimitive.Separator
|
||||
data-slot="dropdown-menu-separator"
|
||||
className={cn("bg-border -mx-1 my-1 h-px", className)}
|
||||
{...props}
|
||||
/>
|
||||
)
|
||||
}
|
||||
|
||||
function DropdownMenuShortcut({
|
||||
className,
|
||||
...props
|
||||
}: React.ComponentProps<"span">) {
|
||||
return (
|
||||
<span
|
||||
data-slot="dropdown-menu-shortcut"
|
||||
className={cn(
|
||||
"text-muted-foreground ml-auto text-xs tracking-widest",
|
||||
className
|
||||
)}
|
||||
{...props}
|
||||
/>
|
||||
)
|
||||
}
|
||||
|
||||
function DropdownMenuSub({
|
||||
...props
|
||||
}: React.ComponentProps<typeof DropdownMenuPrimitive.Sub>) {
|
||||
return <DropdownMenuPrimitive.Sub data-slot="dropdown-menu-sub" {...props} />
|
||||
}
|
||||
|
||||
function DropdownMenuSubTrigger({
|
||||
className,
|
||||
inset,
|
||||
children,
|
||||
...props
|
||||
}: React.ComponentProps<typeof DropdownMenuPrimitive.SubTrigger> & {
|
||||
inset?: boolean
|
||||
}) {
|
||||
return (
|
||||
<DropdownMenuPrimitive.SubTrigger
|
||||
data-slot="dropdown-menu-sub-trigger"
|
||||
data-inset={inset}
|
||||
className={cn(
|
||||
"focus:bg-accent focus:text-accent-foreground data-[state=open]:bg-accent data-[state=open]:text-accent-foreground flex cursor-default items-center rounded-sm px-2 py-1.5 text-sm outline-hidden select-none data-inset:pl-8",
|
||||
className
|
||||
)}
|
||||
{...props}
|
||||
>
|
||||
{children}
|
||||
<ChevronRightIcon className="ml-auto size-4" />
|
||||
</DropdownMenuPrimitive.SubTrigger>
|
||||
)
|
||||
}
|
||||
|
||||
function DropdownMenuSubContent({
|
||||
className,
|
||||
...props
|
||||
}: React.ComponentProps<typeof DropdownMenuPrimitive.SubContent>) {
|
||||
return (
|
||||
<DropdownMenuPrimitive.SubContent
|
||||
data-slot="dropdown-menu-sub-content"
|
||||
className={cn(
|
||||
"bg-popover text-popover-foreground data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 data-[state=closed]:zoom-out-95 data-[state=open]:zoom-in-95 data-[side=bottom]:slide-in-from-top-2 data-[side=left]:slide-in-from-right-2 data-[side=right]:slide-in-from-left-2 data-[side=top]:slide-in-from-bottom-2 z-50 min-w-32 origin-(--radix-dropdown-menu-content-transform-origin) overflow-hidden rounded-md border p-1 shadow-lg",
|
||||
className
|
||||
)}
|
||||
{...props}
|
||||
/>
|
||||
)
|
||||
}
|
||||
|
||||
export {
|
||||
DropdownMenu,
|
||||
DropdownMenuPortal,
|
||||
DropdownMenuTrigger,
|
||||
DropdownMenuContent,
|
||||
DropdownMenuGroup,
|
||||
DropdownMenuLabel,
|
||||
DropdownMenuItem,
|
||||
DropdownMenuCheckboxItem,
|
||||
DropdownMenuRadioGroup,
|
||||
DropdownMenuRadioItem,
|
||||
DropdownMenuSeparator,
|
||||
DropdownMenuShortcut,
|
||||
DropdownMenuSub,
|
||||
DropdownMenuSubTrigger,
|
||||
DropdownMenuSubContent,
|
||||
}
|
||||
@ -1,150 +0,0 @@
|
||||
"use client";
|
||||
|
||||
import { Card, CardContent, CardHeader } from "@/components/ui/card";
|
||||
import { Badge } from "@/components/ui/badge";
|
||||
import { Skeleton } from "@/components/ui/skeleton";
|
||||
import { cn } from "@/lib/utils";
|
||||
import { TrendingUp, TrendingDown, Minus } from "lucide-react";
|
||||
|
||||
interface MetricCardProps {
|
||||
title: string;
|
||||
value: string | number | null | undefined;
|
||||
description?: string;
|
||||
icon?: React.ReactNode;
|
||||
trend?: {
|
||||
value: number;
|
||||
label?: string;
|
||||
isPositive?: boolean;
|
||||
};
|
||||
variant?: "default" | "primary" | "success" | "warning" | "danger";
|
||||
isLoading?: boolean;
|
||||
className?: string;
|
||||
}
|
||||
|
||||
export default function MetricCard({
|
||||
title,
|
||||
value,
|
||||
description,
|
||||
icon,
|
||||
trend,
|
||||
variant = "default",
|
||||
isLoading = false,
|
||||
className,
|
||||
}: MetricCardProps) {
|
||||
if (isLoading) {
|
||||
return (
|
||||
<Card className={cn("relative overflow-hidden", className)}>
|
||||
<CardHeader className="pb-3">
|
||||
<div className="flex items-center justify-between">
|
||||
<Skeleton className="h-4 w-24" />
|
||||
<Skeleton className="h-10 w-10 rounded-full" />
|
||||
</div>
|
||||
</CardHeader>
|
||||
<CardContent>
|
||||
<Skeleton className="h-8 w-16 mb-2" />
|
||||
<Skeleton className="h-3 w-20" />
|
||||
</CardContent>
|
||||
</Card>
|
||||
);
|
||||
}
|
||||
|
||||
const getVariantClasses = () => {
|
||||
switch (variant) {
|
||||
case "primary":
|
||||
return "border border-blue-100 bg-white shadow-sm hover:shadow-md";
|
||||
case "success":
|
||||
return "border border-green-100 bg-white shadow-sm hover:shadow-md";
|
||||
case "warning":
|
||||
return "border border-pink-100 bg-white shadow-sm hover:shadow-md";
|
||||
case "danger":
|
||||
return "border border-red-100 bg-white shadow-sm hover:shadow-md";
|
||||
default:
|
||||
return "border border-gray-100 bg-white shadow-sm hover:shadow-md";
|
||||
}
|
||||
};
|
||||
|
||||
const getIconClasses = () => {
|
||||
return "bg-gray-50 text-gray-900 border-gray-100";
|
||||
};
|
||||
|
||||
const getTrendIcon = () => {
|
||||
if (!trend) return null;
|
||||
|
||||
if (trend.value === 0) {
|
||||
return <Minus className="h-3 w-3" />;
|
||||
}
|
||||
|
||||
return trend.isPositive !== false ? (
|
||||
<TrendingUp className="h-3 w-3" />
|
||||
) : (
|
||||
<TrendingDown className="h-3 w-3" />
|
||||
);
|
||||
};
|
||||
|
||||
const getTrendColor = () => {
|
||||
if (!trend || trend.value === 0) return "text-muted-foreground";
|
||||
return trend.isPositive !== false ? "text-green-600 dark:text-green-400" : "text-red-600 dark:text-red-400";
|
||||
};
|
||||
|
||||
return (
|
||||
<Card
|
||||
className={cn(
|
||||
"relative overflow-hidden transition-all duration-200 hover:shadow-lg hover:-translate-y-0.5",
|
||||
getVariantClasses(),
|
||||
className
|
||||
)}
|
||||
>
|
||||
|
||||
<CardHeader className="pb-3 relative">
|
||||
<div className="flex items-start justify-between">
|
||||
<div className="space-y-1">
|
||||
<p className="text-sm font-medium text-gray-900 leading-none">
|
||||
{title}
|
||||
</p>
|
||||
{description && (
|
||||
<p className="text-xs text-muted-foreground/80">
|
||||
{description}
|
||||
</p>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{icon && (
|
||||
<div
|
||||
className={cn(
|
||||
"flex h-10 w-10 shrink-0 items-center justify-center rounded-full border transition-colors",
|
||||
getIconClasses()
|
||||
)}
|
||||
>
|
||||
<span className="text-lg">{icon}</span>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</CardHeader>
|
||||
|
||||
<CardContent className="relative">
|
||||
<div className="flex items-end justify-between">
|
||||
<div className="space-y-1">
|
||||
<p className="text-2xl font-bold tracking-tight text-gray-900">
|
||||
{value ?? "—"}
|
||||
</p>
|
||||
|
||||
{trend && (
|
||||
<Badge
|
||||
variant="secondary"
|
||||
className={cn(
|
||||
"text-xs font-medium px-2 py-0.5 gap-1",
|
||||
getTrendColor(),
|
||||
"bg-background/50 border-current/20"
|
||||
)}
|
||||
>
|
||||
{getTrendIcon()}
|
||||
{Math.abs(trend.value).toFixed(1)}%
|
||||
{trend.label && ` ${trend.label}`}
|
||||
</Badge>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</CardContent>
|
||||
</Card>
|
||||
);
|
||||
}
|
||||
@ -1,28 +0,0 @@
|
||||
"use client"
|
||||
|
||||
import * as React from "react"
|
||||
import * as SeparatorPrimitive from "@radix-ui/react-separator"
|
||||
|
||||
import { cn } from "@/lib/utils"
|
||||
|
||||
function Separator({
|
||||
className,
|
||||
orientation = "horizontal",
|
||||
decorative = true,
|
||||
...props
|
||||
}: React.ComponentProps<typeof SeparatorPrimitive.Root>) {
|
||||
return (
|
||||
<SeparatorPrimitive.Root
|
||||
data-slot="separator"
|
||||
decorative={decorative}
|
||||
orientation={orientation}
|
||||
className={cn(
|
||||
"bg-border shrink-0 data-[orientation=horizontal]:h-px data-[orientation=horizontal]:w-full data-[orientation=vertical]:h-full data-[orientation=vertical]:w-px",
|
||||
className
|
||||
)}
|
||||
{...props}
|
||||
/>
|
||||
)
|
||||
}
|
||||
|
||||
export { Separator }
|
||||
@ -1,13 +0,0 @@
|
||||
import { cn } from "@/lib/utils"
|
||||
|
||||
function Skeleton({ className, ...props }: React.ComponentProps<"div">) {
|
||||
return (
|
||||
<div
|
||||
data-slot="skeleton"
|
||||
className={cn("bg-accent animate-pulse rounded-md", className)}
|
||||
{...props}
|
||||
/>
|
||||
)
|
||||
}
|
||||
|
||||
export { Skeleton }
|
||||
@ -1,61 +0,0 @@
|
||||
"use client"
|
||||
|
||||
import * as React from "react"
|
||||
import * as TooltipPrimitive from "@radix-ui/react-tooltip"
|
||||
|
||||
import { cn } from "@/lib/utils"
|
||||
|
||||
function TooltipProvider({
|
||||
delayDuration = 0,
|
||||
...props
|
||||
}: React.ComponentProps<typeof TooltipPrimitive.Provider>) {
|
||||
return (
|
||||
<TooltipPrimitive.Provider
|
||||
data-slot="tooltip-provider"
|
||||
delayDuration={delayDuration}
|
||||
{...props}
|
||||
/>
|
||||
)
|
||||
}
|
||||
|
||||
function Tooltip({
|
||||
...props
|
||||
}: React.ComponentProps<typeof TooltipPrimitive.Root>) {
|
||||
return (
|
||||
<TooltipProvider>
|
||||
<TooltipPrimitive.Root data-slot="tooltip" {...props} />
|
||||
</TooltipProvider>
|
||||
)
|
||||
}
|
||||
|
||||
function TooltipTrigger({
|
||||
...props
|
||||
}: React.ComponentProps<typeof TooltipPrimitive.Trigger>) {
|
||||
return <TooltipPrimitive.Trigger data-slot="tooltip-trigger" {...props} />
|
||||
}
|
||||
|
||||
function TooltipContent({
|
||||
className,
|
||||
sideOffset = 0,
|
||||
children,
|
||||
...props
|
||||
}: React.ComponentProps<typeof TooltipPrimitive.Content>) {
|
||||
return (
|
||||
<TooltipPrimitive.Portal>
|
||||
<TooltipPrimitive.Content
|
||||
data-slot="tooltip-content"
|
||||
sideOffset={sideOffset}
|
||||
className={cn(
|
||||
"bg-primary text-primary-foreground animate-in fade-in-0 zoom-in-95 data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=closed]:zoom-out-95 data-[side=bottom]:slide-in-from-top-2 data-[side=left]:slide-in-from-right-2 data-[side=right]:slide-in-from-left-2 data-[side=top]:slide-in-from-bottom-2 z-50 w-fit origin-(--radix-tooltip-content-transform-origin) rounded-md px-3 py-1.5 text-xs text-balance",
|
||||
className
|
||||
)}
|
||||
{...props}
|
||||
>
|
||||
{children}
|
||||
<TooltipPrimitive.Arrow className="bg-primary fill-primary z-50 size-2.5 translate-y-[calc(-50%-2px)] rotate-45 rounded-[2px]" />
|
||||
</TooltipPrimitive.Content>
|
||||
</TooltipPrimitive.Portal>
|
||||
)
|
||||
}
|
||||
|
||||
export { Tooltip, TooltipTrigger, TooltipContent, TooltipProvider }
|
||||
@ -1,81 +0,0 @@
|
||||
import { PrismaClient } from '@prisma/client';
|
||||
import { ProcessingStatusManager } from './lib/processingStatusManager';
|
||||
|
||||
const prisma = new PrismaClient();
|
||||
|
||||
async function debugImportStatus() {
|
||||
try {
|
||||
console.log('=== DEBUGGING PROCESSING STATUS (REFACTORED SYSTEM) ===\n');
|
||||
|
||||
// Get pipeline status using the new system
|
||||
const pipelineStatus = await ProcessingStatusManager.getPipelineStatus();
|
||||
|
||||
console.log(`Total Sessions: ${pipelineStatus.totalSessions}\n`);
|
||||
|
||||
// Display status for each stage
|
||||
const stages = ['CSV_IMPORT', 'TRANSCRIPT_FETCH', 'SESSION_CREATION', 'AI_ANALYSIS', 'QUESTION_EXTRACTION'];
|
||||
|
||||
for (const stage of stages) {
|
||||
console.log(`${stage}:`);
|
||||
const stageData = pipelineStatus.pipeline[stage] || {};
|
||||
|
||||
const pending = stageData.PENDING || 0;
|
||||
const inProgress = stageData.IN_PROGRESS || 0;
|
||||
const completed = stageData.COMPLETED || 0;
|
||||
const failed = stageData.FAILED || 0;
|
||||
const skipped = stageData.SKIPPED || 0;
|
||||
|
||||
console.log(` PENDING: ${pending}`);
|
||||
console.log(` IN_PROGRESS: ${inProgress}`);
|
||||
console.log(` COMPLETED: ${completed}`);
|
||||
console.log(` FAILED: ${failed}`);
|
||||
console.log(` SKIPPED: ${skipped}`);
|
||||
console.log('');
|
||||
}
|
||||
|
||||
// Check Sessions vs SessionImports
|
||||
console.log('=== SESSION IMPORT RELATIONSHIP ===');
|
||||
const sessionsWithImports = await prisma.session.count({
|
||||
where: { importId: { not: null } }
|
||||
});
|
||||
const totalSessions = await prisma.session.count();
|
||||
|
||||
console.log(` Sessions with importId: ${sessionsWithImports}`);
|
||||
console.log(` Total sessions: ${totalSessions}`);
|
||||
|
||||
// Show failed sessions if any
|
||||
const failedSessions = await ProcessingStatusManager.getFailedSessions();
|
||||
if (failedSessions.length > 0) {
|
||||
console.log('\n=== FAILED SESSIONS ===');
|
||||
failedSessions.slice(0, 10).forEach(failure => {
|
||||
console.log(` ${failure.session.import?.externalSessionId || failure.sessionId}: ${failure.stage} - ${failure.errorMessage}`);
|
||||
});
|
||||
|
||||
if (failedSessions.length > 10) {
|
||||
console.log(` ... and ${failedSessions.length - 10} more failed sessions`);
|
||||
}
|
||||
} else {
|
||||
console.log('\n✓ No failed sessions found');
|
||||
}
|
||||
|
||||
// Show what needs processing
|
||||
console.log('\n=== WHAT NEEDS PROCESSING ===');
|
||||
|
||||
for (const stage of stages) {
|
||||
const stageData = pipelineStatus.pipeline[stage] || {};
|
||||
const pending = stageData.PENDING || 0;
|
||||
const failed = stageData.FAILED || 0;
|
||||
|
||||
if (pending > 0 || failed > 0) {
|
||||
console.log(`• ${stage}: ${pending} pending, ${failed} failed`);
|
||||
}
|
||||
}
|
||||
|
||||
} catch (error) {
|
||||
console.error('Error debugging processing status:', error);
|
||||
} finally {
|
||||
await prisma.$disconnect();
|
||||
}
|
||||
}
|
||||
|
||||
debugImportStatus();
|
||||
227
docs/D1_CLI_ACCESS.md
Normal file
227
docs/D1_CLI_ACCESS.md
Normal file
@ -0,0 +1,227 @@
|
||||
# D1 Database Command Line Access
|
||||
|
||||
This guide shows you how to access and manage your Cloudflare D1 database `d1-notso-livedash` from the command line.
|
||||
|
||||
## Quick Reference
|
||||
|
||||
### Using the Custom D1 CLI Script
|
||||
|
||||
```bash
|
||||
# Simple and fast commands
|
||||
pnpm d1 tables # List all tables
|
||||
pnpm d1 info # Database information
|
||||
pnpm d1 schema User # Show table schema
|
||||
pnpm d1 query "SELECT COUNT(*) FROM User" # Execute SQL
|
||||
pnpm d1 export backup.sql # Export database
|
||||
|
||||
# Remote (production) commands
|
||||
pnpm d1 --remote info # Production database info
|
||||
pnpm d1 --remote query "SELECT * FROM Company LIMIT 5"
|
||||
```
|
||||
|
||||
### Using Package.json Scripts
|
||||
|
||||
```bash
|
||||
# Database information
|
||||
pnpm d1:list # List all D1 databases
|
||||
pnpm d1:info # Local database info
|
||||
pnpm d1:info:remote # Remote database info
|
||||
|
||||
# Backup and export
|
||||
pnpm d1:export # Export local database
|
||||
pnpm d1:export:remote # Export remote database
|
||||
pnpm d1:schema # Export schema only
|
||||
```
|
||||
|
||||
### Direct Wrangler Commands
|
||||
|
||||
```bash
|
||||
# Basic operations
|
||||
npx wrangler d1 list
|
||||
npx wrangler d1 info d1-notso-livedash
|
||||
npx wrangler d1 execute d1-notso-livedash --command "SELECT * FROM User"
|
||||
|
||||
# Remote operations (add --remote flag)
|
||||
npx wrangler d1 info d1-notso-livedash --remote
|
||||
npx wrangler d1 execute d1-notso-livedash --remote --command "SELECT COUNT(*) FROM Company"
|
||||
```
|
||||
|
||||
## Database Schema
|
||||
|
||||
Your D1 database contains these tables:
|
||||
|
||||
### Company Table
|
||||
|
||||
```sql
|
||||
- id (TEXT, PRIMARY KEY)
|
||||
- name (TEXT, NOT NULL)
|
||||
- csvUrl (TEXT, NOT NULL)
|
||||
- csvUsername (TEXT)
|
||||
- csvPassword (TEXT)
|
||||
- sentimentAlert (REAL)
|
||||
- dashboardOpts (TEXT)
|
||||
- createdAt (DATETIME, NOT NULL, DEFAULT CURRENT_TIMESTAMP)
|
||||
- updatedAt (DATETIME, NOT NULL)
|
||||
```
|
||||
|
||||
### User Table
|
||||
|
||||
```sql
|
||||
- id (TEXT, PRIMARY KEY)
|
||||
- email (TEXT, NOT NULL)
|
||||
- password (TEXT, NOT NULL)
|
||||
- companyId (TEXT, NOT NULL)
|
||||
- role (TEXT, NOT NULL)
|
||||
- resetToken (TEXT)
|
||||
- resetTokenExpiry (DATETIME)
|
||||
```
|
||||
|
||||
### Session Table
|
||||
|
||||
```sql
|
||||
- id (TEXT, PRIMARY KEY)
|
||||
- userId (TEXT, NOT NULL)
|
||||
- expiresAt (DATETIME, NOT NULL)
|
||||
```
|
||||
|
||||
## Common SQL Queries
|
||||
|
||||
### Data Exploration
|
||||
|
||||
```sql
|
||||
-- Check table sizes
|
||||
SELECT 'Company' as table_name, COUNT(*) as count FROM Company
|
||||
UNION ALL
|
||||
SELECT 'User' as table_name, COUNT(*) as count FROM User
|
||||
UNION ALL
|
||||
SELECT 'Session' as table_name, COUNT(*) as count FROM Session;
|
||||
|
||||
-- Show all table names
|
||||
SELECT name FROM sqlite_master WHERE type='table' ORDER BY name;
|
||||
|
||||
-- Get table schema
|
||||
PRAGMA table_info(User);
|
||||
```
|
||||
|
||||
### Business Queries
|
||||
|
||||
```sql
|
||||
-- List companies with user counts
|
||||
SELECT c.name, c.id, COUNT(u.id) as user_count
|
||||
FROM Company c
|
||||
LEFT JOIN User u ON c.id = u.companyId
|
||||
GROUP BY c.id, c.name;
|
||||
|
||||
-- Find admin users
|
||||
SELECT u.email, c.name as company
|
||||
FROM User u
|
||||
JOIN Company c ON u.companyId = c.id
|
||||
WHERE u.role = 'admin';
|
||||
|
||||
-- Active sessions
|
||||
SELECT COUNT(*) as active_sessions
|
||||
FROM Session
|
||||
WHERE expiresAt > datetime('now');
|
||||
```
|
||||
|
||||
## Local vs Remote Databases
|
||||
|
||||
- **Local Database**: Located at `.wrangler/state/v3/d1/` (for development)
|
||||
- **Remote Database**: Cloudflare's production D1 database
|
||||
|
||||
### When to Use Each:
|
||||
|
||||
- **Local**: Development, testing, safe experimentation
|
||||
- **Remote**: Production data, deployment verification
|
||||
|
||||
## Database Statistics
|
||||
|
||||
Current database info:
|
||||
|
||||
- **Database ID**: d4ee7efe-d37a-48e4-bed7-fdfaa5108131
|
||||
- **Region**: WEUR (Western Europe)
|
||||
- **Size**: ~53.2 kB
|
||||
- **Tables**: 6 (including system tables)
|
||||
- **Read Queries (24h)**: 65
|
||||
- **Write Queries (24h)**: 8
|
||||
|
||||
## Scripts Available
|
||||
|
||||
### `/scripts/d1.js` (Recommended)
|
||||
|
||||
Simple, fast CLI for common operations:
|
||||
|
||||
```bash
|
||||
node scripts/d1.js tables
|
||||
node scripts/d1.js schema User
|
||||
node scripts/d1.js query "SELECT * FROM Company"
|
||||
node scripts/d1.js --remote info
|
||||
```
|
||||
|
||||
### `/scripts/d1-query.js`
|
||||
|
||||
Simple query executor:
|
||||
|
||||
```bash
|
||||
node scripts/d1-query.js "SELECT COUNT(*) FROM User"
|
||||
node scripts/d1-query.js --remote "SELECT * FROM Company"
|
||||
```
|
||||
|
||||
### `/scripts/d1-manager.js`
|
||||
|
||||
Comprehensive database management (if needed for advanced operations):
|
||||
|
||||
```bash
|
||||
node scripts/d1-manager.js info
|
||||
node scripts/d1-manager.js backup
|
||||
```
|
||||
|
||||
## Backup and Recovery
|
||||
|
||||
### Create Backups
|
||||
|
||||
```bash
|
||||
# Quick backup
|
||||
pnpm d1 export backup_$(date +%Y%m%d).sql
|
||||
|
||||
# Automated backup with timestamp
|
||||
npx wrangler d1 export d1-notso-livedash --output backups/backup_$(date +%Y%m%d_%H%M%S).sql
|
||||
|
||||
# Schema only backup
|
||||
npx wrangler d1 export d1-notso-livedash --no-data --output schema.sql
|
||||
```
|
||||
|
||||
### Restore from Backup
|
||||
|
||||
```bash
|
||||
# Apply SQL file to database
|
||||
npx wrangler d1 execute d1-notso-livedash --file backup.sql
|
||||
```
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Common Issues
|
||||
|
||||
1. **"wrangler not found"**: Use `npx wrangler` instead of `wrangler`
|
||||
2. **Permission denied**: Ensure you're logged into Cloudflare: `npx wrangler login`
|
||||
3. **Database not found**: Check `wrangler.json` for correct binding name
|
||||
|
||||
### Debug Commands
|
||||
|
||||
```bash
|
||||
# Check Wrangler authentication
|
||||
npx wrangler whoami
|
||||
|
||||
# Verify database configuration
|
||||
npx wrangler d1 list
|
||||
|
||||
# Test database connectivity
|
||||
npx wrangler d1 execute d1-notso-livedash --command "SELECT 1"
|
||||
```
|
||||
|
||||
## Security Notes
|
||||
|
||||
- Local database is for development only
|
||||
- Never expose production database credentials
|
||||
- Use `--remote` flag carefully in production
|
||||
- Regular backups are recommended for production data
|
||||
@ -12,10 +12,10 @@ The WordCloud component visualizes categories or topics based on their frequency
|
||||
|
||||
**Features:**
|
||||
|
||||
- Dynamic sizing based on frequency
|
||||
- Colorful display with a pleasing color palette
|
||||
- Responsive design
|
||||
- Interactive hover effects
|
||||
- Dynamic sizing based on frequency
|
||||
- Colorful display with a pleasing color palette
|
||||
- Responsive design
|
||||
- Interactive hover effects
|
||||
|
||||
### 2. GeographicMap
|
||||
|
||||
@ -25,10 +25,10 @@ This component displays a world map with circles representing the number of sess
|
||||
|
||||
**Features:**
|
||||
|
||||
- Interactive map using React Leaflet
|
||||
- Circle sizes scaled by session count
|
||||
- Tooltips showing country names and session counts
|
||||
- Responsive design
|
||||
- Interactive map using React Leaflet
|
||||
- Circle sizes scaled by session count
|
||||
- Tooltips showing country names and session counts
|
||||
- Responsive design
|
||||
|
||||
### 3. MetricCard
|
||||
|
||||
@ -38,10 +38,10 @@ A modern, visually appealing card for displaying key metrics.
|
||||
|
||||
**Features:**
|
||||
|
||||
- Multiple design variants (default, primary, success, warning, danger)
|
||||
- Support for trend indicators
|
||||
- Icons and descriptions
|
||||
- Clean, modern styling
|
||||
- Multiple design variants (default, primary, success, warning, danger)
|
||||
- Support for trend indicators
|
||||
- Icons and descriptions
|
||||
- Clean, modern styling
|
||||
|
||||
### 4. DonutChart
|
||||
|
||||
@ -51,10 +51,10 @@ An enhanced donut chart with better styling and a central text display capabilit
|
||||
|
||||
**Features:**
|
||||
|
||||
- Customizable colors
|
||||
- Center text area for displaying summaries
|
||||
- Interactive tooltips with percentages
|
||||
- Well-balanced legend display
|
||||
- Customizable colors
|
||||
- Center text area for displaying summaries
|
||||
- Interactive tooltips with percentages
|
||||
- Well-balanced legend display
|
||||
|
||||
### 5. ResponseTimeDistribution
|
||||
|
||||
@ -64,28 +64,28 @@ Visualizes the distribution of response times as a histogram.
|
||||
|
||||
**Features:**
|
||||
|
||||
- Color-coded bars (green for fast, yellow for medium, red for slow)
|
||||
- Target time indicator
|
||||
- Automatic binning of response times
|
||||
- Clear labeling and scales
|
||||
- Color-coded bars (green for fast, yellow for medium, red for slow)
|
||||
- Target time indicator
|
||||
- Automatic binning of response times
|
||||
- Clear labeling and scales
|
||||
|
||||
## Dashboard Enhancements
|
||||
|
||||
The dashboard has been enhanced with:
|
||||
|
||||
1. **Improved Layout**: Better use of space and responsive grid layouts
|
||||
2. **Visual Hierarchies**: Clear heading styles and consistent spacing
|
||||
3. **Color Coding**: Semantic use of colors to indicate statuses
|
||||
4. **Interactive Elements**: Better button styles with loading indicators
|
||||
5. **Data Context**: More complete view of metrics with additional visualizations
|
||||
6. **Geographic Insights**: Map view of session distribution by country
|
||||
7. **Language Analysis**: Improved language distribution visualization
|
||||
8. **Category Analysis**: Word cloud for category popularity
|
||||
9. **Performance Metrics**: Response time distribution for better insight into system performance
|
||||
1. **Improved Layout**: Better use of space and responsive grid layouts
|
||||
2. **Visual Hierarchies**: Clear heading styles and consistent spacing
|
||||
3. **Color Coding**: Semantic use of colors to indicate statuses
|
||||
4. **Interactive Elements**: Better button styles with loading indicators
|
||||
5. **Data Context**: More complete view of metrics with additional visualizations
|
||||
6. **Geographic Insights**: Map view of session distribution by country
|
||||
7. **Language Analysis**: Improved language distribution visualization
|
||||
8. **Category Analysis**: Word cloud for category popularity
|
||||
9. **Performance Metrics**: Response time distribution for better insight into system performance
|
||||
|
||||
## Usage Notes
|
||||
|
||||
- The geographic map and response time distribution use simulated data where actual data is not available
|
||||
- All components are responsive and will adjust to different screen sizes
|
||||
- The dashboard automatically refreshes data when using the refresh button
|
||||
- Admin users have access to additional controls at the bottom of the dashboard
|
||||
- The geographic map and response time distribution use simulated data where actual data is not available
|
||||
- All components are responsive and will adjust to different screen sizes
|
||||
- The dashboard automatically refreshes data when using the refresh button
|
||||
- Admin users have access to additional controls at the bottom of the dashboard
|
||||
|
||||
@ -1,130 +0,0 @@
|
||||
# PostgreSQL Migration Documentation
|
||||
|
||||
## Overview
|
||||
|
||||
Successfully migrated the livedash-node application from SQLite to PostgreSQL using Neon as the database provider. This migration provides better scalability, performance, and production-readiness.
|
||||
|
||||
## Migration Summary
|
||||
|
||||
### What Was Changed
|
||||
|
||||
1. **Database Provider**: Changed from SQLite to PostgreSQL in `prisma/schema.prisma`
|
||||
2. **Environment Configuration**: Updated to use environment-based database URL selection
|
||||
3. **Test Setup**: Configured separate test database using `DATABASE_URL_TEST`
|
||||
4. **Migration History**: Reset and created fresh PostgreSQL migrations
|
||||
|
||||
### Database Configuration
|
||||
|
||||
#### Production/Development
|
||||
|
||||
- **Provider**: PostgreSQL (Neon)
|
||||
- **Environment Variable**: `DATABASE_URL`
|
||||
- **Connection**: Neon PostgreSQL cluster
|
||||
|
||||
#### Testing
|
||||
|
||||
- **Provider**: PostgreSQL (Neon - separate database)
|
||||
- **Environment Variable**: `DATABASE_URL_TEST`
|
||||
- **Test Setup**: Automatically switches to test database during test runs
|
||||
|
||||
### Files Modified
|
||||
|
||||
1. **`prisma/schema.prisma`**
|
||||
|
||||
- Changed provider from `sqlite` to `postgresql`
|
||||
- Updated URL to use `env("DATABASE_URL")`
|
||||
|
||||
2. **`tests/setup.ts`**
|
||||
|
||||
- Added logic to use `DATABASE_URL_TEST` when available
|
||||
- Ensures test isolation with separate database
|
||||
|
||||
3. **`.env`** (created)
|
||||
|
||||
- Contains `DATABASE_URL` for Prisma CLI operations
|
||||
|
||||
4. **`.env.local`** (existing)
|
||||
|
||||
- Contains both `DATABASE_URL` and `DATABASE_URL_TEST`
|
||||
|
||||
### Database Schema
|
||||
|
||||
All existing models and relationships were preserved:
|
||||
|
||||
- **Company**: Multi-tenant root entity
|
||||
- **User**: Authentication and authorization
|
||||
- **Session**: Processed session data
|
||||
- **SessionImport**: Raw CSV import data
|
||||
- **Message**: Individual conversation messages
|
||||
- **Question**: Normalized question storage
|
||||
- **SessionQuestion**: Session-question relationships
|
||||
- **AIProcessingRequest**: AI cost tracking
|
||||
|
||||
### Migration Process
|
||||
|
||||
1. **Schema Update**: Changed provider to PostgreSQL
|
||||
2. **Migration Reset**: Removed SQLite migration history
|
||||
3. **Fresh Migration**: Created new PostgreSQL migration
|
||||
4. **Client Generation**: Generated new Prisma client for PostgreSQL
|
||||
5. **Database Seeding**: Applied initial seed data
|
||||
6. **Testing**: Verified all functionality works with PostgreSQL
|
||||
|
||||
### Benefits Achieved
|
||||
|
||||
✅ **Production-Ready**: PostgreSQL is enterprise-grade and scalable
|
||||
✅ **Better Performance**: Superior query performance and optimization
|
||||
✅ **Advanced Features**: Full JSON support, arrays, advanced indexing
|
||||
✅ **Test Isolation**: Separate test database prevents data conflicts
|
||||
✅ **Consistency**: Same database engine across all environments
|
||||
✅ **Cloud-Native**: Neon provides managed PostgreSQL with excellent DX
|
||||
|
||||
### Environment Variables
|
||||
|
||||
```env
|
||||
# Production/Development Database
|
||||
DATABASE_URL="postgresql://user:pass@host/database?sslmode=require"
|
||||
|
||||
# Test Database (separate Neon database)
|
||||
DATABASE_URL_TEST="postgresql://user:pass@test-host/test-database?sslmode=require"
|
||||
```
|
||||
|
||||
### Test Configuration
|
||||
|
||||
Tests automatically use the test database when `DATABASE_URL_TEST` is set:
|
||||
|
||||
```typescript
|
||||
// In tests/setup.ts
|
||||
if (process.env.DATABASE_URL_TEST) {
|
||||
process.env.DATABASE_URL = process.env.DATABASE_URL_TEST;
|
||||
}
|
||||
```
|
||||
|
||||
### Verification
|
||||
|
||||
All tests pass successfully:
|
||||
|
||||
- ✅ Environment configuration tests
|
||||
- ✅ Transcript fetcher tests
|
||||
- ✅ Database connection tests
|
||||
- ✅ Schema validation tests
|
||||
- ✅ CRUD operation tests
|
||||
|
||||
### Next Steps
|
||||
|
||||
1. **Data Import**: Import production data if needed
|
||||
2. **Performance Monitoring**: Monitor query performance in production
|
||||
3. **Backup Strategy**: Configure automated backups via Neon
|
||||
4. **Connection Pooling**: Consider connection pooling for high-traffic scenarios
|
||||
|
||||
### Rollback Plan
|
||||
|
||||
If rollback is needed:
|
||||
|
||||
1. Revert `prisma/schema.prisma` to SQLite configuration
|
||||
2. Restore SQLite migration files from git history
|
||||
3. Update environment variables
|
||||
4. Run `prisma migrate reset` and `prisma generate`
|
||||
|
||||
## Conclusion
|
||||
|
||||
The PostgreSQL migration was successful and provides a solid foundation for production deployment. The application now benefits from PostgreSQL's advanced features while maintaining full test isolation and development workflow compatibility.
|
||||
@ -1,133 +0,0 @@
|
||||
# Processing System Refactor - Complete
|
||||
|
||||
## Overview
|
||||
|
||||
Successfully refactored the session processing pipeline from a simple status-based system to a comprehensive multi-stage processing status system. This addresses the original issues with the SessionImport table's `status` and `errorMsg` columns.
|
||||
|
||||
## Problems Solved
|
||||
|
||||
### Original Issues
|
||||
1. **Inconsistent Status Tracking**: The old system used a simple enum on SessionImport that didn't properly track the multi-stage processing pipeline
|
||||
2. **Poor Error Visibility**: Error messages were buried in the SessionImport table and not easily accessible
|
||||
3. **No Stage-Specific Tracking**: The system couldn't track which specific stage of processing failed
|
||||
4. **Difficult Recovery**: Failed sessions were hard to identify and retry
|
||||
5. **Linting Errors**: Multiple TypeScript files referencing removed database fields
|
||||
|
||||
### Schema Changes Made
|
||||
- **Removed** old `status`, `errorMsg`, and `processedAt` columns from SessionImport
|
||||
- **Removed** `processed` field from Session
|
||||
- **Added** new `SessionProcessingStatus` table with granular stage tracking
|
||||
- **Added** `ProcessingStage` and `ProcessingStatus` enums
|
||||
|
||||
## New Processing Pipeline
|
||||
|
||||
### Processing Stages
|
||||
```typescript
|
||||
enum ProcessingStage {
|
||||
CSV_IMPORT // SessionImport created
|
||||
TRANSCRIPT_FETCH // Transcript content fetched
|
||||
SESSION_CREATION // Session + Messages created
|
||||
AI_ANALYSIS // AI processing completed
|
||||
QUESTION_EXTRACTION // Questions extracted
|
||||
}
|
||||
|
||||
enum ProcessingStatus {
|
||||
PENDING, IN_PROGRESS, COMPLETED, FAILED, SKIPPED
|
||||
}
|
||||
```
|
||||
|
||||
### Key Components
|
||||
|
||||
#### 1. ProcessingStatusManager
|
||||
Centralized class for managing processing status with methods:
|
||||
- `initializeSession()` - Set up processing status for new sessions
|
||||
- `startStage()`, `completeStage()`, `failStage()`, `skipStage()` - Stage management
|
||||
- `getSessionsNeedingProcessing()` - Query sessions by stage and status
|
||||
- `getPipelineStatus()` - Get overview of entire pipeline
|
||||
- `getFailedSessions()` - Find sessions needing retry
|
||||
- `resetStageForRetry()` - Reset failed stages
|
||||
|
||||
#### 2. Updated Processing Scheduler
|
||||
- Integrated with new `ProcessingStatusManager`
|
||||
- Tracks AI analysis and question extraction stages
|
||||
- Records detailed processing metadata
|
||||
- Proper error handling and retry capabilities
|
||||
|
||||
#### 3. Migration System
|
||||
- Successfully migrated all 109 existing sessions
|
||||
- Determined current state based on existing data
|
||||
- Preserved all existing functionality
|
||||
|
||||
## Current Pipeline Status
|
||||
|
||||
After migration and refactoring:
|
||||
- **CSV_IMPORT**: 109 completed
|
||||
- **TRANSCRIPT_FETCH**: 109 completed
|
||||
- **SESSION_CREATION**: 109 completed
|
||||
- **AI_ANALYSIS**: 16 completed, 93 pending
|
||||
- **QUESTION_EXTRACTION**: 11 completed, 98 pending
|
||||
|
||||
## Files Updated/Created
|
||||
|
||||
### New Files
|
||||
- `lib/processingStatusManager.ts` - Core processing status management
|
||||
- `check-refactored-pipeline-status.ts` - New pipeline status checker
|
||||
- `migrate-to-refactored-system.ts` - Migration script
|
||||
- `docs/processing-system-refactor.md` - This documentation
|
||||
|
||||
### Updated Files
|
||||
- `prisma/schema.prisma` - Added new processing status tables
|
||||
- `lib/processingScheduler.ts` - Integrated with new status system
|
||||
- `debug-import-status.ts` - Updated to use new system
|
||||
- `fix-import-status.ts` - Updated to use new system
|
||||
|
||||
### Removed Files
|
||||
- `check-pipeline-status.ts` - Replaced by refactored version
|
||||
|
||||
## Benefits Achieved
|
||||
|
||||
1. **Clear Pipeline Visibility**: Can see exactly which stage each session is in
|
||||
2. **Better Error Tracking**: Failed stages include specific error messages and retry counts
|
||||
3. **Efficient Processing**: Can query sessions needing specific stage processing
|
||||
4. **Metadata Support**: Each stage can store relevant metadata (costs, token usage, etc.)
|
||||
5. **Easy Recovery**: Failed sessions can be easily identified and retried
|
||||
6. **Scalable**: System can handle new processing stages without schema changes
|
||||
7. **No Linting Errors**: All TypeScript compilation issues resolved
|
||||
|
||||
## Usage Examples
|
||||
|
||||
### Check Pipeline Status
|
||||
```bash
|
||||
npx tsx check-refactored-pipeline-status.ts
|
||||
```
|
||||
|
||||
### Debug Processing Issues
|
||||
```bash
|
||||
npx tsx debug-import-status.ts
|
||||
```
|
||||
|
||||
### Fix/Retry Failed Sessions
|
||||
```bash
|
||||
npx tsx fix-import-status.ts
|
||||
```
|
||||
|
||||
### Process Sessions
|
||||
```bash
|
||||
npx tsx test-ai-processing.ts
|
||||
```
|
||||
|
||||
## Next Steps
|
||||
|
||||
1. **Test AI Processing**: Run AI processing on pending sessions
|
||||
2. **Monitor Performance**: Watch for any issues with the new system
|
||||
3. **Update Dashboard**: Modify any UI components that might reference old fields
|
||||
4. **Documentation**: Update any API documentation that references the old system
|
||||
|
||||
## Migration Notes
|
||||
|
||||
- All existing data preserved
|
||||
- No data loss during migration
|
||||
- Backward compatibility maintained where possible
|
||||
- System ready for production use
|
||||
|
||||
The refactored system provides much better visibility into the processing pipeline and makes it easy to identify and resolve any issues that arise during session processing.
|
||||
@ -1,79 +0,0 @@
|
||||
# Scheduler Error Fixes
|
||||
|
||||
## Issues Identified and Resolved
|
||||
|
||||
### 1. Invalid Company Configuration
|
||||
|
||||
**Problem**: Company `26fc3d34-c074-4556-85bd-9a66fafc0e08` had an invalid CSV URL (`https://example.com/data.csv`) with no authentication credentials.
|
||||
|
||||
**Solution**:
|
||||
|
||||
- Added validation in `fetchAndStoreSessionsForAllCompanies()` to skip companies with example/invalid URLs
|
||||
- Removed the invalid company record from the database using `fix_companies.js`
|
||||
|
||||
### 2. Transcript Fetching Errors
|
||||
|
||||
**Problem**: Multiple "Error fetching transcript: Unauthorized" messages were flooding the logs when individual transcript files couldn't be accessed.
|
||||
|
||||
**Solution**:
|
||||
|
||||
- Improved error handling in `fetchTranscriptContent()` function
|
||||
- Added probabilistic logging (only ~10% of errors logged) to prevent log spam
|
||||
- Added timeout (10 seconds) for transcript fetching
|
||||
- Made transcript fetching failures non-blocking (sessions are still created without transcript content)
|
||||
|
||||
### 3. CSV Fetching Errors
|
||||
|
||||
**Problem**: "Failed to fetch CSV: Not Found" errors for companies with invalid URLs.
|
||||
|
||||
**Solution**:
|
||||
|
||||
- Added URL validation to skip companies with `example.com` URLs
|
||||
- Improved error logging to be more descriptive
|
||||
|
||||
## Current Status
|
||||
|
||||
✅ **Fixed**: No more "Unauthorized" error spam
|
||||
✅ **Fixed**: No more "Not Found" CSV errors
|
||||
✅ **Fixed**: Scheduler runs cleanly without errors
|
||||
✅ **Improved**: Better error handling and logging
|
||||
|
||||
## Remaining Companies
|
||||
|
||||
After cleanup, only valid companies remain:
|
||||
|
||||
- **Demo Company** (`790b9233-d369-451f-b92c-f4dceb42b649`)
|
||||
- CSV URL: `https://proto.notso.ai/jumbo/chats`
|
||||
- Has valid authentication credentials
|
||||
- 107 sessions in database
|
||||
|
||||
## Files Modified
|
||||
|
||||
1. **lib/csvFetcher.js**
|
||||
|
||||
- Added company URL validation
|
||||
- Improved transcript fetching error handling
|
||||
- Reduced error log verbosity
|
||||
|
||||
2. **fix_companies.js** (cleanup script)
|
||||
- Removes invalid company records
|
||||
- Can be run again if needed
|
||||
|
||||
## Monitoring
|
||||
|
||||
The scheduler now runs cleanly every 15 minutes. To monitor:
|
||||
|
||||
```bash
|
||||
# Check scheduler logs
|
||||
node debug_db.js
|
||||
|
||||
# Test manual refresh
|
||||
node -e "import('./lib/csvFetcher.js').then(m => m.fetchAndStoreSessionsForAllCompanies())"
|
||||
```
|
||||
|
||||
## Future Improvements
|
||||
|
||||
1. Add health check endpoint for scheduler status
|
||||
2. Add metrics for successful/failed fetches
|
||||
3. Consider retry logic for temporary failures
|
||||
4. Add alerting for persistent failures
|
||||
@ -1,211 +0,0 @@
|
||||
# Scheduler Workflow Documentation
|
||||
|
||||
## Overview
|
||||
|
||||
The LiveDash system has two main schedulers that work together to fetch and process session data:
|
||||
|
||||
1. **Session Refresh Scheduler** - Fetches new sessions from CSV files
|
||||
2. **Processing Scheduler** - Processes session transcripts with AI
|
||||
|
||||
## Current Status (as of latest check)
|
||||
|
||||
- **Total sessions**: 107
|
||||
- **Processed sessions**: 0
|
||||
- **Sessions with transcript**: 0
|
||||
- **Ready for processing**: 0
|
||||
|
||||
## How the `processed` Field Works
|
||||
|
||||
The ProcessingScheduler picks up sessions where `processed` is **NOT** `true`, which includes:
|
||||
|
||||
- `processed = false`
|
||||
- `processed = null`
|
||||
|
||||
**Query used:**
|
||||
|
||||
```javascript
|
||||
{ processed: { not: true } } // Either false or null
|
||||
```
|
||||
|
||||
## Complete Workflow
|
||||
|
||||
### Step 1: Session Refresh (CSV Fetching)
|
||||
|
||||
**What it does:**
|
||||
|
||||
- Fetches session data from company CSV URLs
|
||||
- Creates session records in database with basic metadata
|
||||
- Sets `transcriptContent = null` initially
|
||||
- Sets `processed = null` initially
|
||||
|
||||
**Runs:** Every 30 minutes (cron: `*/30 * * * *`)
|
||||
|
||||
### Step 2: Transcript Fetching
|
||||
|
||||
**What it does:**
|
||||
|
||||
- Downloads full transcript content for sessions
|
||||
- Updates `transcriptContent` field with actual conversation data
|
||||
- Sessions remain `processed = null` until AI processing
|
||||
|
||||
**Runs:** As part of session refresh process
|
||||
|
||||
### Step 3: AI Processing
|
||||
|
||||
**What it does:**
|
||||
|
||||
- Finds sessions with transcript content where `processed != true`
|
||||
- Sends transcripts to OpenAI for analysis
|
||||
- Extracts: sentiment, category, questions, summary, etc.
|
||||
- Updates session with processed data
|
||||
- Sets `processed = true`
|
||||
|
||||
**Runs:** Every hour (cron: `0 * * * *`)
|
||||
|
||||
## Manual Trigger Commands
|
||||
|
||||
### Check Current Status
|
||||
|
||||
```bash
|
||||
node scripts/manual-triggers.js status
|
||||
```
|
||||
|
||||
### Trigger Session Refresh (Fetch new sessions from CSV)
|
||||
|
||||
```bash
|
||||
node scripts/manual-triggers.js refresh
|
||||
```
|
||||
|
||||
### Trigger AI Processing (Process unprocessed sessions)
|
||||
|
||||
```bash
|
||||
node scripts/manual-triggers.js process
|
||||
```
|
||||
|
||||
### Run Both Schedulers
|
||||
|
||||
```bash
|
||||
node scripts/manual-triggers.js both
|
||||
```
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### No Sessions Being Processed?
|
||||
|
||||
1. **Check if sessions have transcripts:**
|
||||
|
||||
```bash
|
||||
node scripts/manual-triggers.js status
|
||||
```
|
||||
|
||||
2. **If "Sessions with transcript" is 0:**
|
||||
- Sessions exist but transcripts haven't been fetched yet
|
||||
- Run session refresh: `node scripts/manual-triggers.js refresh`
|
||||
|
||||
3. **If "Ready for processing" is 0 but "Sessions with transcript" > 0:**
|
||||
- All sessions with transcripts have already been processed
|
||||
- Check if `OPENAI_API_KEY` is set in environment
|
||||
|
||||
### Common Issues
|
||||
|
||||
#### "No sessions found requiring processing"
|
||||
|
||||
- All sessions with transcripts have been processed (`processed = true`)
|
||||
- Or no sessions have transcript content yet
|
||||
|
||||
#### "OPENAI_API_KEY environment variable is not set"
|
||||
|
||||
- Add OpenAI API key to `.env.development` file
|
||||
- Restart the application
|
||||
|
||||
#### "Error fetching transcript: Unauthorized"
|
||||
|
||||
- CSV credentials are incorrect or expired
|
||||
- Check company CSV username/password in database
|
||||
|
||||
## Database Field Mapping
|
||||
|
||||
### Before AI Processing
|
||||
|
||||
```javascript
|
||||
{
|
||||
id: "session-uuid",
|
||||
transcriptContent: "full conversation text" | null,
|
||||
processed: null,
|
||||
sentimentCategory: null,
|
||||
questions: null,
|
||||
summary: null,
|
||||
// ... other fields
|
||||
}
|
||||
```
|
||||
|
||||
### After AI Processing
|
||||
|
||||
```javascript
|
||||
{
|
||||
id: "session-uuid",
|
||||
transcriptContent: "full conversation text",
|
||||
processed: true,
|
||||
sentimentCategory: "positive" | "neutral" | "negative",
|
||||
questions: '["question 1", "question 2"]', // JSON string
|
||||
summary: "Brief conversation summary",
|
||||
language: "en", // ISO 639-1 code
|
||||
messagesSent: 5,
|
||||
sentiment: 0.8, // Float value (-1 to 1)
|
||||
escalated: false,
|
||||
forwardedHr: false,
|
||||
category: "Schedule & Hours",
|
||||
// ... other fields
|
||||
}
|
||||
```
|
||||
|
||||
## Scheduler Configuration
|
||||
|
||||
### Session Refresh Scheduler
|
||||
|
||||
- **File**: `lib/scheduler.js`
|
||||
- **Frequency**: Every 30 minutes
|
||||
- **Cron**: `*/30 * * * *`
|
||||
|
||||
### Processing Scheduler
|
||||
|
||||
- **File**: `lib/processingScheduler.js`
|
||||
- **Frequency**: Every hour
|
||||
- **Cron**: `0 * * * *`
|
||||
- **Batch size**: 10 sessions per run
|
||||
|
||||
## Environment Variables Required
|
||||
|
||||
```bash
|
||||
# Database
|
||||
DATABASE_URL="postgresql://..."
|
||||
|
||||
# OpenAI (for processing)
|
||||
OPENAI_API_KEY="sk-..."
|
||||
|
||||
# NextAuth
|
||||
NEXTAUTH_SECRET="..."
|
||||
NEXTAUTH_URL="http://localhost:3000"
|
||||
```
|
||||
|
||||
## Next Steps for Testing
|
||||
|
||||
1. **Trigger session refresh** to fetch transcripts:
|
||||
|
||||
```bash
|
||||
node scripts/manual-triggers.js refresh
|
||||
```
|
||||
|
||||
2. **Check status** to see if transcripts were fetched:
|
||||
|
||||
```bash
|
||||
node scripts/manual-triggers.js status
|
||||
```
|
||||
|
||||
3. **Trigger processing** if transcripts are available:
|
||||
|
||||
```bash
|
||||
node scripts/manual-triggers.js process
|
||||
```
|
||||
|
||||
4. **View results** in the dashboard session details pages
|
||||
@ -1,86 +0,0 @@
|
||||
# Session Processing with OpenAI
|
||||
|
||||
This document explains how the session processing system works in LiveDash-Node.
|
||||
|
||||
## Overview
|
||||
|
||||
The system now includes an automated process for analyzing chat session transcripts using OpenAI's API. This process:
|
||||
|
||||
1. Fetches session data from CSV sources
|
||||
2. Only adds new sessions that don't already exist in the database
|
||||
3. Processes session transcripts with OpenAI to extract valuable insights
|
||||
4. Updates the database with the processed information
|
||||
|
||||
## How It Works
|
||||
|
||||
### Session Fetching
|
||||
|
||||
- The system fetches session data from configured CSV URLs for each company
|
||||
- Unlike the previous implementation, it now only adds sessions that don't already exist in the database
|
||||
- This prevents duplicate sessions and allows for incremental updates
|
||||
|
||||
### Transcript Processing
|
||||
|
||||
- For sessions with transcript content that haven't been processed yet, the system calls OpenAI's API
|
||||
- The API analyzes the transcript and extracts the following information:
|
||||
- Primary language used (ISO 639-1 code)
|
||||
- Number of messages sent by the user
|
||||
- Overall sentiment (positive, neutral, negative)
|
||||
- Whether the conversation was escalated
|
||||
- Whether HR contact was mentioned or provided
|
||||
- Best-fitting category for the conversation
|
||||
- Up to 5 paraphrased questions asked by the user
|
||||
- A brief summary of the conversation
|
||||
|
||||
### Scheduling
|
||||
|
||||
The system includes two schedulers:
|
||||
|
||||
1. **Session Refresh Scheduler**: Runs every 15 minutes to fetch new sessions from CSV sources
|
||||
2. **Session Processing Scheduler**: Runs every hour to process unprocessed sessions with OpenAI
|
||||
|
||||
## Database Schema
|
||||
|
||||
The Session model has been updated with new fields to store the processed data:
|
||||
|
||||
- `processed`: Boolean flag indicating whether the session has been processed
|
||||
- `sentimentCategory`: String value ("positive", "neutral", "negative") from OpenAI
|
||||
- `questions`: JSON array of questions asked by the user
|
||||
- `summary`: Brief summary of the conversation
|
||||
|
||||
## Configuration
|
||||
|
||||
### OpenAI API Key
|
||||
|
||||
To use the session processing feature, you need to add your OpenAI API key to the `.env.local` file:
|
||||
|
||||
```ini
|
||||
OPENAI_API_KEY=your_api_key_here
|
||||
```
|
||||
|
||||
### Running with Schedulers
|
||||
|
||||
To run the application with schedulers enabled:
|
||||
|
||||
- Development: `npm run dev`
|
||||
- Development (with schedulers disabled): `npm run dev:no-schedulers`
|
||||
- Production: `npm run start`
|
||||
|
||||
Note: These commands will start a custom Next.js server with the schedulers enabled. You'll need to have an OpenAI API key set in your `.env.local` file for the session processing to work.
|
||||
|
||||
## Manual Processing
|
||||
|
||||
You can also manually process sessions by running the script:
|
||||
|
||||
```
|
||||
node scripts/process_sessions.mjs
|
||||
```
|
||||
|
||||
This will process all unprocessed sessions that have transcript content.
|
||||
|
||||
## Customization
|
||||
|
||||
The processing logic can be customized by modifying:
|
||||
|
||||
- `lib/processingScheduler.ts`: Contains the OpenAI processing logic
|
||||
- `scripts/process_sessions.ts`: Standalone script for manual processing
|
||||
@ -1,228 +0,0 @@
|
||||
# Transcript Parsing Implementation
|
||||
|
||||
## Overview
|
||||
|
||||
Added structured message parsing to the LiveDash system, allowing transcripts to be broken down into individual messages with timestamps, roles, and content. This provides a much better user experience for viewing conversations.
|
||||
|
||||
## Database Changes
|
||||
|
||||
### New Message Table
|
||||
|
||||
```sql
|
||||
CREATE TABLE Message (
|
||||
id TEXT PRIMARY KEY DEFAULT (uuid()),
|
||||
sessionId TEXT NOT NULL,
|
||||
timestamp DATETIME NOT NULL,
|
||||
role TEXT NOT NULL,
|
||||
content TEXT NOT NULL,
|
||||
"order" INTEGER NOT NULL,
|
||||
createdAt DATETIME DEFAULT CURRENT_TIMESTAMP,
|
||||
FOREIGN KEY (sessionId) REFERENCES Session(id) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
CREATE INDEX Message_sessionId_order_idx ON Message(sessionId, "order");
|
||||
```
|
||||
|
||||
### Updated Session Table
|
||||
|
||||
- Added `messages` relation to Session model
|
||||
- Sessions can now have both raw transcript content AND parsed messages
|
||||
|
||||
## New Components
|
||||
|
||||
### 1. Message Interface (`lib/types.ts`)
|
||||
|
||||
```typescript
|
||||
export interface Message {
|
||||
id: string;
|
||||
sessionId: string;
|
||||
timestamp: Date;
|
||||
role: string; // "User", "Assistant", "System", etc.
|
||||
content: string;
|
||||
order: number; // Order within the conversation (0, 1, 2, ...)
|
||||
createdAt: Date;
|
||||
}
|
||||
```
|
||||
|
||||
### 2. Transcript Parser (`lib/transcriptParser.js`)
|
||||
|
||||
- **`parseChatLogToJSON(logString)`** - Parses raw transcript text into structured messages
|
||||
- **`storeMessagesForSession(sessionId, messages)`** - Stores parsed messages in database
|
||||
- **`processTranscriptForSession(sessionId, transcriptContent)`** - Complete processing for one session
|
||||
- **`processAllUnparsedTranscripts()`** - Batch process all unparsed transcripts
|
||||
- **`getMessagesForSession(sessionId)`** - Retrieve messages for a session
|
||||
|
||||
### 3. MessageViewer Component (`components/MessageViewer.tsx`)
|
||||
|
||||
- Chat-like interface for displaying parsed messages
|
||||
- Color-coded by role (User: blue, Assistant: gray, System: yellow)
|
||||
- Shows timestamps and message order
|
||||
- Scrollable with conversation metadata
|
||||
|
||||
## Updated Components
|
||||
|
||||
### 1. Session API (`pages/api/dashboard/session/[id].ts`)
|
||||
|
||||
- Now includes parsed messages in session response
|
||||
- Messages are ordered by `order` field (ascending)
|
||||
|
||||
### 2. Session Details Page (`app/dashboard/sessions/[id]/page.tsx`)
|
||||
|
||||
- Added MessageViewer component
|
||||
- Shows both parsed messages AND raw transcript
|
||||
- Prioritizes parsed messages when available
|
||||
|
||||
### 3. ChatSession Interface (`lib/types.ts`)
|
||||
|
||||
- Added optional `messages?: Message[]` field
|
||||
|
||||
## Parsing Logic
|
||||
|
||||
### Supported Format
|
||||
|
||||
The parser expects transcript format:
|
||||
|
||||
```
|
||||
[DD.MM.YYYY HH:MM:SS] Role: Message content
|
||||
[DD.MM.YYYY HH:MM:SS] User: Hello, I need help
|
||||
[DD.MM.YYYY HH:MM:SS] Assistant: How can I help you today?
|
||||
```
|
||||
|
||||
### Features
|
||||
|
||||
- **Multi-line support** - Messages can span multiple lines
|
||||
- **Timestamp parsing** - Converts DD.MM.YYYY HH:MM:SS to ISO format
|
||||
- **Role detection** - Extracts sender role from each message
|
||||
- **Ordering** - Maintains conversation order with explicit order field
|
||||
- **Sorting** - Messages sorted by timestamp, then by role (User before Assistant)
|
||||
|
||||
## Manual Commands
|
||||
|
||||
### New Commands Added
|
||||
|
||||
```bash
|
||||
# Parse transcripts into structured messages
|
||||
node scripts/manual-triggers.js parse
|
||||
|
||||
# Complete workflow: refresh → parse → process
|
||||
node scripts/manual-triggers.js all
|
||||
|
||||
# Check status (now shows parsing info)
|
||||
node scripts/manual-triggers.js status
|
||||
```
|
||||
|
||||
### Updated Commands
|
||||
|
||||
- **`status`** - Now shows transcript and parsing statistics
|
||||
- **`all`** - New command that runs refresh → parse → process in sequence
|
||||
|
||||
## Workflow Integration
|
||||
|
||||
### Complete Processing Pipeline
|
||||
|
||||
1. **Session Refresh** - Fetch sessions from CSV, download transcripts
|
||||
2. **Transcript Parsing** - Parse raw transcripts into structured messages
|
||||
3. **AI Processing** - Process sessions with OpenAI for sentiment, categories, etc.
|
||||
|
||||
### Database States
|
||||
|
||||
```javascript
|
||||
// After CSV fetch
|
||||
{
|
||||
transcriptContent: "raw text...",
|
||||
messages: [], // Empty
|
||||
processed: null
|
||||
}
|
||||
|
||||
// After parsing
|
||||
{
|
||||
transcriptContent: "raw text...",
|
||||
messages: [Message, Message, ...], // Parsed
|
||||
processed: null
|
||||
}
|
||||
|
||||
// After AI processing
|
||||
{
|
||||
transcriptContent: "raw text...",
|
||||
messages: [Message, Message, ...], // Parsed
|
||||
processed: true,
|
||||
sentimentCategory: "positive",
|
||||
summary: "Brief summary...",
|
||||
// ... other AI fields
|
||||
}
|
||||
```
|
||||
|
||||
## User Experience Improvements
|
||||
|
||||
### Before
|
||||
|
||||
- Only raw transcript text in a text area
|
||||
- Difficult to follow conversation flow
|
||||
- No clear distinction between speakers
|
||||
|
||||
### After
|
||||
|
||||
- **Chat-like interface** with message bubbles
|
||||
- **Color-coded roles** for easy identification
|
||||
- **Timestamps** for each message
|
||||
- **Conversation metadata** (first/last message times)
|
||||
- **Fallback to raw transcript** if parsing fails
|
||||
- **Both views available** - structured AND raw
|
||||
|
||||
## Testing
|
||||
|
||||
### Manual Testing Commands
|
||||
|
||||
```bash
|
||||
# Check current status
|
||||
node scripts/manual-triggers.js status
|
||||
|
||||
# Parse existing transcripts
|
||||
node scripts/manual-triggers.js parse
|
||||
|
||||
# Full pipeline test
|
||||
node scripts/manual-triggers.js all
|
||||
```
|
||||
|
||||
### Expected Results
|
||||
|
||||
1. Sessions with transcript content get parsed into individual messages
|
||||
2. Session detail pages show chat-like interface
|
||||
3. Both parsed messages and raw transcript are available
|
||||
4. No data loss - original transcript content preserved
|
||||
|
||||
## Technical Benefits
|
||||
|
||||
### Performance
|
||||
|
||||
- **Indexed queries** - Messages indexed by sessionId and order
|
||||
- **Efficient loading** - Only load messages when needed
|
||||
- **Cascading deletes** - Messages automatically deleted with sessions
|
||||
|
||||
### Maintainability
|
||||
|
||||
- **Separation of concerns** - Parsing logic isolated in dedicated module
|
||||
- **Type safety** - Full TypeScript support for Message interface
|
||||
- **Error handling** - Graceful fallbacks when parsing fails
|
||||
|
||||
### Extensibility
|
||||
|
||||
- **Role flexibility** - Supports any role names (User, Assistant, System, etc.)
|
||||
- **Content preservation** - Multi-line messages fully supported
|
||||
- **Metadata ready** - Easy to add message-level metadata in future
|
||||
|
||||
## Migration Notes
|
||||
|
||||
### Existing Data
|
||||
|
||||
- **No data loss** - Original transcript content preserved
|
||||
- **Backward compatibility** - Pages work with or without parsed messages
|
||||
- **Gradual migration** - Can parse transcripts incrementally
|
||||
|
||||
### Database Migration
|
||||
|
||||
- New Message table created with foreign key constraints
|
||||
- Existing Session table unchanged (only added relation)
|
||||
- Index created for efficient message queries
|
||||
|
||||
This implementation provides a solid foundation for enhanced conversation analysis and user experience while maintaining full backward compatibility.
|
||||
@ -1,88 +0,0 @@
|
||||
import { PrismaClient, ProcessingStage, ProcessingStatus } from '@prisma/client';
|
||||
import { ProcessingStatusManager } from './lib/processingStatusManager';
|
||||
|
||||
const prisma = new PrismaClient();
|
||||
|
||||
async function fixProcessingStatus() {
|
||||
try {
|
||||
console.log('=== FIXING PROCESSING STATUS (REFACTORED SYSTEM) ===\n');
|
||||
|
||||
// Check for any failed processing stages that might need retry
|
||||
const failedSessions = await ProcessingStatusManager.getFailedSessions();
|
||||
|
||||
console.log(`Found ${failedSessions.length} failed processing stages`);
|
||||
|
||||
if (failedSessions.length > 0) {
|
||||
console.log('\nFailed sessions by stage:');
|
||||
const failuresByStage: Record<string, number> = {};
|
||||
|
||||
failedSessions.forEach(failure => {
|
||||
failuresByStage[failure.stage] = (failuresByStage[failure.stage] || 0) + 1;
|
||||
});
|
||||
|
||||
Object.entries(failuresByStage).forEach(([stage, count]) => {
|
||||
console.log(` ${stage}: ${count} failures`);
|
||||
});
|
||||
|
||||
// Show sample failed sessions
|
||||
console.log('\nSample failed sessions:');
|
||||
failedSessions.slice(0, 5).forEach(failure => {
|
||||
console.log(` ${failure.session.import?.externalSessionId || failure.sessionId}: ${failure.stage} - ${failure.errorMessage}`);
|
||||
});
|
||||
|
||||
// Ask if user wants to reset failed stages for retry
|
||||
console.log('\nTo reset failed stages for retry, you can use:');
|
||||
console.log('ProcessingStatusManager.resetStageForRetry(sessionId, stage)');
|
||||
}
|
||||
|
||||
// Check for sessions that might be stuck in IN_PROGRESS
|
||||
const stuckSessions = await prisma.sessionProcessingStatus.findMany({
|
||||
where: {
|
||||
status: ProcessingStatus.IN_PROGRESS,
|
||||
startedAt: {
|
||||
lt: new Date(Date.now() - 30 * 60 * 1000) // Started more than 30 minutes ago
|
||||
}
|
||||
},
|
||||
include: {
|
||||
session: {
|
||||
include: {
|
||||
import: true
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
if (stuckSessions.length > 0) {
|
||||
console.log(`\nFound ${stuckSessions.length} sessions stuck in IN_PROGRESS state:`);
|
||||
stuckSessions.forEach(stuck => {
|
||||
console.log(` ${stuck.session.import?.externalSessionId || stuck.sessionId}: ${stuck.stage} (started: ${stuck.startedAt})`);
|
||||
});
|
||||
|
||||
console.log('\nThese sessions may need to be reset to PENDING status for retry.');
|
||||
}
|
||||
|
||||
// Show current pipeline status
|
||||
console.log('\n=== CURRENT PIPELINE STATUS ===');
|
||||
const pipelineStatus = await ProcessingStatusManager.getPipelineStatus();
|
||||
|
||||
const stages = ['CSV_IMPORT', 'TRANSCRIPT_FETCH', 'SESSION_CREATION', 'AI_ANALYSIS', 'QUESTION_EXTRACTION'];
|
||||
|
||||
for (const stage of stages) {
|
||||
const stageData = pipelineStatus.pipeline[stage] || {};
|
||||
const pending = stageData.PENDING || 0;
|
||||
const inProgress = stageData.IN_PROGRESS || 0;
|
||||
const completed = stageData.COMPLETED || 0;
|
||||
const failed = stageData.FAILED || 0;
|
||||
const skipped = stageData.SKIPPED || 0;
|
||||
|
||||
console.log(`${stage}: ${completed} completed, ${pending} pending, ${inProgress} in progress, ${failed} failed, ${skipped} skipped`);
|
||||
}
|
||||
|
||||
} catch (error) {
|
||||
console.error('Error fixing processing status:', error);
|
||||
} finally {
|
||||
await prisma.$disconnect();
|
||||
}
|
||||
}
|
||||
|
||||
fixProcessingStatus();
|
||||
@ -1,41 +1,440 @@
|
||||
// Simplified CSV fetcher - fetches and parses CSV data without any processing
|
||||
// Maps directly to SessionImport table fields
|
||||
// Fetches, parses, and returns chat session data for a company from a CSV URL
|
||||
import fetch from "node-fetch";
|
||||
import { parse } from "csv-parse/sync";
|
||||
import ISO6391 from "iso-639-1";
|
||||
import countries from "i18n-iso-countries";
|
||||
|
||||
// Raw CSV data interface matching SessionImport schema
|
||||
interface RawSessionImport {
|
||||
externalSessionId: string;
|
||||
startTimeRaw: string;
|
||||
endTimeRaw: string;
|
||||
ipAddress: string | null;
|
||||
countryCode: string | null;
|
||||
language: string | null;
|
||||
messagesSent: number | null;
|
||||
sentimentRaw: string | null;
|
||||
escalatedRaw: string | null;
|
||||
forwardedHrRaw: string | null;
|
||||
fullTranscriptUrl: string | null;
|
||||
avgResponseTimeSeconds: number | null;
|
||||
tokens: number | null;
|
||||
tokensEur: number | null;
|
||||
category: string | null;
|
||||
initialMessage: string | null;
|
||||
// Register locales for i18n-iso-countries
|
||||
import enLocale from "i18n-iso-countries/langs/en.json" with { type: "json" };
|
||||
countries.registerLocale(enLocale);
|
||||
|
||||
// This type is used internally for parsing the CSV records.
// Every modelled column arrives as a raw (optional) string from the CSV
// parser; only session_id and start_time are guaranteed present.
interface CSVRecord {
  session_id: string;
  start_time: string;
  end_time?: string;
  ip_address?: string;
  country?: string;
  language?: string;
  messages_sent?: string;
  sentiment?: string;
  escalated?: string;
  forwarded_hr?: string;
  full_transcript_url?: string;
  avg_response_time?: string;
  tokens?: string;
  tokens_eur?: string;
  category?: string;
  initial_msg?: string;
  // Index signature: tolerate extra, unmodelled CSV columns.
  [key: string]: string | undefined;
}
|
||||
|
||||
// Parsed, typed representation of one chat session from the CSV feed
// (dates as Date objects, numerics as numbers, flags as booleans).
interface SessionData {
  id: string;
  sessionId: string;
  startTime: Date;
  endTime: Date | null;
  ipAddress?: string;
  country?: string | null; // Will store ISO 3166-1 alpha-2 country code or null/undefined
  language?: string | null; // Will store ISO 639-1 language code or null/undefined
  messagesSent: number;
  sentiment: number | null;
  escalated: boolean;
  forwardedHr: boolean;
  fullTranscriptUrl?: string | null;
  avgResponseTime: number | null;
  tokens: number;
  tokensEur: number;
  category?: string | null;
  initialMsg?: string;
}
|
||||
|
||||
/**
|
||||
* Fetches and parses CSV data from a URL without any processing
|
||||
* Maps CSV columns by position to SessionImport fields
|
||||
* @param url The CSV URL
|
||||
* @param username Optional username for authentication
|
||||
* @param password Optional password for authentication
|
||||
* @returns Array of raw session import data
|
||||
* Converts country names to ISO 3166-1 alpha-2 codes
|
||||
* @param countryStr Raw country string from CSV
|
||||
* @returns ISO 3166-1 alpha-2 country code or null if not found
|
||||
*/
|
||||
function getCountryCode(countryStr?: string): string | null | undefined {
|
||||
if (countryStr === undefined) return undefined;
|
||||
if (countryStr === null || countryStr === "") return null;
|
||||
|
||||
// Clean the input
|
||||
const normalized = countryStr.trim();
|
||||
if (!normalized) return null;
|
||||
|
||||
// Direct ISO code check (if already a 2-letter code)
|
||||
if (normalized.length === 2 && normalized === normalized.toUpperCase()) {
|
||||
return countries.isValid(normalized) ? normalized : null;
|
||||
}
|
||||
|
||||
// Special case for country codes used in the dataset
|
||||
const countryMapping: Record<string, string> = {
|
||||
BA: "BA", // Bosnia and Herzegovina
|
||||
NL: "NL", // Netherlands
|
||||
USA: "US", // United States
|
||||
UK: "GB", // United Kingdom
|
||||
GB: "GB", // Great Britain
|
||||
Nederland: "NL",
|
||||
Netherlands: "NL",
|
||||
Netherland: "NL",
|
||||
Holland: "NL",
|
||||
Germany: "DE",
|
||||
Deutschland: "DE",
|
||||
Belgium: "BE",
|
||||
België: "BE",
|
||||
Belgique: "BE",
|
||||
France: "FR",
|
||||
Frankreich: "FR",
|
||||
"United States": "US",
|
||||
"United States of America": "US",
|
||||
Bosnia: "BA",
|
||||
"Bosnia and Herzegovina": "BA",
|
||||
"Bosnia & Herzegovina": "BA",
|
||||
};
|
||||
|
||||
// Check mapping
|
||||
if (normalized in countryMapping) {
|
||||
return countryMapping[normalized];
|
||||
}
|
||||
|
||||
// Try to get the code from the country name (in English)
|
||||
try {
|
||||
const code = countries.getAlpha2Code(normalized, "en");
|
||||
if (code) return code;
|
||||
} catch (error) {
|
||||
process.stderr.write(
|
||||
`[CSV] Error converting country name to code: ${normalized} - ${error}\n`
|
||||
);
|
||||
}
|
||||
|
||||
// If all else fails, return null
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts language names to ISO 639-1 codes
|
||||
* @param languageStr Raw language string from CSV
|
||||
* @returns ISO 639-1 language code or null if not found
|
||||
*/
|
||||
function getLanguageCode(languageStr?: string): string | null | undefined {
|
||||
if (languageStr === undefined) return undefined;
|
||||
if (languageStr === null || languageStr === "") return null;
|
||||
|
||||
// Clean the input
|
||||
const normalized = languageStr.trim();
|
||||
if (!normalized) return null;
|
||||
|
||||
// Direct ISO code check (if already a 2-letter code)
|
||||
if (normalized.length === 2 && normalized === normalized.toLowerCase()) {
|
||||
return ISO6391.validate(normalized) ? normalized : null;
|
||||
}
|
||||
|
||||
// Special case mappings
|
||||
const languageMapping: Record<string, string> = {
|
||||
english: "en",
|
||||
English: "en",
|
||||
dutch: "nl",
|
||||
Dutch: "nl",
|
||||
nederlands: "nl",
|
||||
Nederlands: "nl",
|
||||
nl: "nl",
|
||||
bosnian: "bs",
|
||||
Bosnian: "bs",
|
||||
turkish: "tr",
|
||||
Turkish: "tr",
|
||||
german: "de",
|
||||
German: "de",
|
||||
deutsch: "de",
|
||||
Deutsch: "de",
|
||||
french: "fr",
|
||||
French: "fr",
|
||||
français: "fr",
|
||||
Français: "fr",
|
||||
spanish: "es",
|
||||
Spanish: "es",
|
||||
español: "es",
|
||||
Español: "es",
|
||||
italian: "it",
|
||||
Italian: "it",
|
||||
italiano: "it",
|
||||
Italiano: "it",
|
||||
nizozemski: "nl", // "Dutch" in some Slavic languages
|
||||
};
|
||||
|
||||
// Check mapping
|
||||
if (normalized in languageMapping) {
|
||||
return languageMapping[normalized];
|
||||
}
|
||||
|
||||
// Try to get code using the ISO6391 library
|
||||
try {
|
||||
const code = ISO6391.getCode(normalized);
|
||||
if (code) return code;
|
||||
} catch (error) {
|
||||
process.stderr.write(
|
||||
`[CSV] Error converting language name to code: ${normalized} - ${error}\n`
|
||||
);
|
||||
}
|
||||
// If all else fails, return null
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Normalizes category values to standard groups
|
||||
* @param categoryStr The raw category string from CSV
|
||||
* @returns A normalized category string
|
||||
*/
|
||||
function normalizeCategory(categoryStr?: string): string | null {
|
||||
if (!categoryStr) return null;
|
||||
|
||||
const normalized = categoryStr.toLowerCase().trim();
|
||||
|
||||
// Define category groups using keywords
|
||||
const categoryMapping: Record<string, string[]> = {
|
||||
Onboarding: [
|
||||
"onboarding",
|
||||
"start",
|
||||
"begin",
|
||||
"new",
|
||||
"orientation",
|
||||
"welcome",
|
||||
"intro",
|
||||
"getting started",
|
||||
"documents",
|
||||
"documenten",
|
||||
"first day",
|
||||
"eerste dag",
|
||||
],
|
||||
"General Information": [
|
||||
"general",
|
||||
"algemeen",
|
||||
"info",
|
||||
"information",
|
||||
"informatie",
|
||||
"question",
|
||||
"vraag",
|
||||
"inquiry",
|
||||
"chat",
|
||||
"conversation",
|
||||
"gesprek",
|
||||
"talk",
|
||||
],
|
||||
Greeting: [
|
||||
"greeting",
|
||||
"greet",
|
||||
"hello",
|
||||
"hi",
|
||||
"hey",
|
||||
"welcome",
|
||||
"hallo",
|
||||
"hoi",
|
||||
"greetings",
|
||||
],
|
||||
"HR & Payroll": [
|
||||
"salary",
|
||||
"salaris",
|
||||
"pay",
|
||||
"payroll",
|
||||
"loon",
|
||||
"loonstrook",
|
||||
"hr",
|
||||
"human resources",
|
||||
"benefits",
|
||||
"vacation",
|
||||
"leave",
|
||||
"verlof",
|
||||
"maaltijdvergoeding",
|
||||
"vergoeding",
|
||||
],
|
||||
"Schedules & Hours": [
|
||||
"schedule",
|
||||
"hours",
|
||||
"tijd",
|
||||
"time",
|
||||
"roster",
|
||||
"rooster",
|
||||
"planning",
|
||||
"shift",
|
||||
"dienst",
|
||||
"working hours",
|
||||
"werktijden",
|
||||
"openingstijden",
|
||||
],
|
||||
"Role & Responsibilities": [
|
||||
"role",
|
||||
"job",
|
||||
"function",
|
||||
"functie",
|
||||
"task",
|
||||
"taak",
|
||||
"responsibilities",
|
||||
"leidinggevende",
|
||||
"manager",
|
||||
"teamleider",
|
||||
"supervisor",
|
||||
"team",
|
||||
"lead",
|
||||
],
|
||||
"Technical Support": [
|
||||
"technical",
|
||||
"tech",
|
||||
"support",
|
||||
"laptop",
|
||||
"computer",
|
||||
"system",
|
||||
"systeem",
|
||||
"it",
|
||||
"software",
|
||||
"hardware",
|
||||
],
|
||||
Offboarding: [
|
||||
"offboarding",
|
||||
"leave",
|
||||
"exit",
|
||||
"quit",
|
||||
"resign",
|
||||
"resignation",
|
||||
"ontslag",
|
||||
"vertrek",
|
||||
"afsluiting",
|
||||
],
|
||||
};
|
||||
|
||||
// Try to match the category using keywords
|
||||
for (const [category, keywords] of Object.entries(categoryMapping)) {
|
||||
if (keywords.some((keyword) => normalized.includes(keyword))) {
|
||||
return category;
|
||||
}
|
||||
}
|
||||
|
||||
// If no match, return "Other"
|
||||
return "Other";
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts sentiment string values to numeric scores
|
||||
* @param sentimentStr The sentiment string from the CSV
|
||||
* @returns A numeric score representing the sentiment
|
||||
*/
|
||||
function mapSentimentToScore(sentimentStr?: string): number | null {
|
||||
if (!sentimentStr) return null;
|
||||
|
||||
// Convert to lowercase for case-insensitive matching
|
||||
const sentiment = sentimentStr.toLowerCase();
|
||||
|
||||
// Map sentiment strings to numeric values on a scale from -1 to 2
|
||||
const sentimentMap: Record<string, number> = {
|
||||
happy: 1.0,
|
||||
excited: 1.5,
|
||||
positive: 0.8,
|
||||
neutral: 0.0,
|
||||
playful: 0.7,
|
||||
negative: -0.8,
|
||||
angry: -1.0,
|
||||
sad: -0.7,
|
||||
frustrated: -0.9,
|
||||
positief: 0.8, // Dutch
|
||||
neutraal: 0.0, // Dutch
|
||||
negatief: -0.8, // Dutch
|
||||
positivo: 0.8, // Spanish/Italian
|
||||
neutro: 0.0, // Spanish/Italian
|
||||
negativo: -0.8, // Spanish/Italian
|
||||
yes: 0.5, // For any "yes" sentiment
|
||||
no: -0.5, // For any "no" sentiment
|
||||
};
|
||||
|
||||
return sentimentMap[sentiment] !== undefined
|
||||
? sentimentMap[sentiment]
|
||||
: isNaN(parseFloat(sentiment))
|
||||
? null
|
||||
: parseFloat(sentiment);
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if a string value should be considered as boolean true
|
||||
* @param value The string value to check
|
||||
* @returns True if the string indicates a positive/true value
|
||||
*/
|
||||
function isTruthyValue(value?: string): boolean {
|
||||
if (!value) return false;
|
||||
|
||||
const truthyValues = [
|
||||
"1",
|
||||
"true",
|
||||
"yes",
|
||||
"y",
|
||||
"ja",
|
||||
"si",
|
||||
"oui",
|
||||
"да",
|
||||
"да",
|
||||
"はい",
|
||||
];
|
||||
|
||||
return truthyValues.includes(value.toLowerCase());
|
||||
}
|
||||
|
||||
/**
|
||||
* Safely parses a date string into a Date object.
|
||||
* Handles potential errors and various formats, prioritizing D-M-YYYY HH:MM:SS.
|
||||
* @param dateStr The date string to parse.
|
||||
* @returns A Date object or null if parsing fails.
|
||||
*/
|
||||
function safeParseDate(dateStr?: string): Date | null {
|
||||
if (!dateStr) return null;
|
||||
|
||||
// Try to parse D-M-YYYY HH:MM:SS format (with hyphens or dots)
|
||||
const dateTimeRegex =
|
||||
/^(\d{1,2})[.-](\d{1,2})[.-](\d{4}) (\d{1,2}):(\d{1,2}):(\d{1,2})$/;
|
||||
const match = dateStr.match(dateTimeRegex);
|
||||
|
||||
if (match) {
|
||||
const day = match[1];
|
||||
const month = match[2];
|
||||
const year = match[3];
|
||||
const hour = match[4];
|
||||
const minute = match[5];
|
||||
const second = match[6];
|
||||
|
||||
// Reformat to YYYY-MM-DDTHH:MM:SS (ISO-like, but local time)
|
||||
// Ensure month and day are two digits
|
||||
const formattedDateStr = `${year}-${month.padStart(2, "0")}-${day.padStart(2, "0")}T${hour.padStart(2, "0")}:${minute.padStart(2, "0")}:${second.padStart(2, "0")}`;
|
||||
|
||||
try {
|
||||
const date = new Date(formattedDateStr);
|
||||
// Basic validation: check if the constructed date is valid
|
||||
if (!isNaN(date.getTime())) {
|
||||
// console.log(`[safeParseDate] Parsed from D-M-YYYY: ${dateStr} -> ${formattedDateStr} -> ${date.toISOString()}`);
|
||||
return date;
|
||||
}
|
||||
} catch (e) {
|
||||
console.warn(
|
||||
`[safeParseDate] Error parsing reformatted string ${formattedDateStr} from ${dateStr}:`,
|
||||
e
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Fallback for other potential formats (e.g., direct ISO 8601) or if the primary parse failed
|
||||
try {
|
||||
const parsedDate = new Date(dateStr);
|
||||
if (!isNaN(parsedDate.getTime())) {
|
||||
// console.log(`[safeParseDate] Parsed with fallback: ${dateStr} -> ${parsedDate.toISOString()}`);
|
||||
return parsedDate;
|
||||
}
|
||||
} catch (e) {
|
||||
console.warn(`[safeParseDate] Error parsing with fallback ${dateStr}:`, e);
|
||||
}
|
||||
|
||||
console.warn(`Failed to parse date string: ${dateStr}`);
|
||||
return null;
|
||||
}
|
||||
|
||||
export async function fetchAndParseCsv(
|
||||
url: string,
|
||||
username?: string,
|
||||
password?: string
|
||||
): Promise<RawSessionImport[]> {
|
||||
): Promise<Partial<SessionData>[]> {
|
||||
const authHeader =
|
||||
username && password
|
||||
? "Basic " + Buffer.from(`${username}:${password}`).toString("base64")
|
||||
@ -44,39 +443,56 @@ export async function fetchAndParseCsv(
|
||||
const res = await fetch(url, {
|
||||
headers: authHeader ? { Authorization: authHeader } : {},
|
||||
});
|
||||
|
||||
if (!res.ok) {
|
||||
throw new Error(`Failed to fetch CSV: ${res.status} ${res.statusText}`);
|
||||
}
|
||||
if (!res.ok) throw new Error("Failed to fetch CSV: " + res.statusText);
|
||||
|
||||
const text = await res.text();
|
||||
|
||||
// Parse CSV without headers, using positional column mapping
|
||||
const records: string[][] = parse(text, {
|
||||
// Parse without expecting headers, using known order
|
||||
const records: CSVRecord[] = parse(text, {
|
||||
delimiter: ",",
|
||||
from_line: 1, // Start from first line (no headers)
|
||||
columns: [
|
||||
"session_id",
|
||||
"start_time",
|
||||
"end_time",
|
||||
"ip_address",
|
||||
"country",
|
||||
"language",
|
||||
"messages_sent",
|
||||
"sentiment",
|
||||
"escalated",
|
||||
"forwarded_hr",
|
||||
"full_transcript_url",
|
||||
"avg_response_time",
|
||||
"tokens",
|
||||
"tokens_eur",
|
||||
"category",
|
||||
"initial_msg",
|
||||
],
|
||||
from_line: 1,
|
||||
relax_column_count: true,
|
||||
skip_empty_lines: true,
|
||||
trim: true,
|
||||
});
|
||||
|
||||
// Map CSV columns by position to SessionImport fields
|
||||
return records.map((row) => ({
|
||||
externalSessionId: row[0] || "",
|
||||
startTimeRaw: row[1] || "",
|
||||
endTimeRaw: row[2] || "",
|
||||
ipAddress: row[3] || null,
|
||||
countryCode: row[4] || null,
|
||||
language: row[5] || null,
|
||||
messagesSent: row[6] ? parseInt(row[6], 10) || null : null,
|
||||
sentimentRaw: row[7] || null,
|
||||
escalatedRaw: row[8] || null,
|
||||
forwardedHrRaw: row[9] || null,
|
||||
fullTranscriptUrl: row[10] || null,
|
||||
avgResponseTimeSeconds: row[11] ? parseFloat(row[11]) || null : null,
|
||||
tokens: row[12] ? parseInt(row[12], 10) || null : null,
|
||||
tokensEur: row[13] ? parseFloat(row[13]) || null : null,
|
||||
category: row[14] || null,
|
||||
initialMessage: row[15] || null,
|
||||
// Coerce types for relevant columns
|
||||
return records.map((r) => ({
|
||||
id: r.session_id,
|
||||
startTime: safeParseDate(r.start_time) || new Date(), // Fallback to current date if invalid
|
||||
endTime: safeParseDate(r.end_time),
|
||||
ipAddress: r.ip_address,
|
||||
country: getCountryCode(r.country),
|
||||
language: getLanguageCode(r.language),
|
||||
messagesSent: Number(r.messages_sent) || 0,
|
||||
sentiment: mapSentimentToScore(r.sentiment),
|
||||
escalated: isTruthyValue(r.escalated),
|
||||
forwardedHr: isTruthyValue(r.forwarded_hr),
|
||||
fullTranscriptUrl: r.full_transcript_url,
|
||||
avgResponseTime: r.avg_response_time
|
||||
? parseFloat(r.avg_response_time)
|
||||
: null,
|
||||
tokens: Number(r.tokens) || 0,
|
||||
tokensEur: r.tokens_eur ? parseFloat(r.tokens_eur) : 0,
|
||||
category: normalizeCategory(r.category),
|
||||
initialMsg: r.initial_msg,
|
||||
}));
|
||||
}
|
||||
|
||||
147
lib/env.ts
147
lib/env.ts
@ -1,147 +0,0 @@
|
||||
// Centralized environment variable management
|
||||
import { readFileSync } from "fs";
|
||||
import { fileURLToPath } from "url";
|
||||
import { dirname, join } from "path";
|
||||
|
||||
/**
|
||||
* Parse environment variable value by removing quotes, comments, and trimming whitespace
|
||||
*/
|
||||
function parseEnvValue(value: string | undefined): string {
|
||||
if (!value) return '';
|
||||
|
||||
// Trim whitespace
|
||||
let cleaned = value.trim();
|
||||
|
||||
// Remove inline comments (everything after #)
|
||||
const commentIndex = cleaned.indexOf('#');
|
||||
if (commentIndex !== -1) {
|
||||
cleaned = cleaned.substring(0, commentIndex).trim();
|
||||
}
|
||||
|
||||
// Remove surrounding quotes (both single and double)
|
||||
if ((cleaned.startsWith('"') && cleaned.endsWith('"')) ||
|
||||
(cleaned.startsWith("'") && cleaned.endsWith("'"))) {
|
||||
cleaned = cleaned.slice(1, -1);
|
||||
}
|
||||
|
||||
return cleaned;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse integer with fallback to default value
|
||||
*/
|
||||
function parseIntWithDefault(value: string | undefined, defaultValue: number): number {
|
||||
const cleaned = parseEnvValue(value);
|
||||
if (!cleaned) return defaultValue;
|
||||
|
||||
const parsed = parseInt(cleaned, 10);
|
||||
return isNaN(parsed) ? defaultValue : parsed;
|
||||
}
|
||||
|
||||
// Load environment variables from .env.local
// (resolved relative to this module, one directory up from lib/).
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
const envPath = join(__dirname, '..', '.env.local');

// Load .env.local if it exists. Values already present in process.env
// are NOT overwritten, so real environment variables take precedence.
try {
  const envFile = readFileSync(envPath, 'utf8');
  // NOTE(review): startsWith('#') only skips comments at column 0;
  // indented comment lines pass through (harmless: they contain no '=').
  const envVars = envFile.split('\n').filter(line => line.trim() && !line.startsWith('#'));

  envVars.forEach(line => {
    // Split on the first '='; re-join the remainder so values may
    // themselves contain '=' characters.
    const [key, ...valueParts] = line.split('=');
    if (key && valueParts.length > 0) {
      const rawValue = valueParts.join('=');
      const cleanedValue = parseEnvValue(rawValue);
      if (!process.env[key.trim()]) {
        process.env[key.trim()] = cleanedValue;
      }
    }
  });
} catch (error) {
  // Silently fail if .env.local doesn't exist
}
|
||||
|
||||
/**
 * Typed environment variables with defaults.
 * All string values pass through parseEnvValue (quote/comment stripping);
 * integer values pass through parseIntWithDefault.
 */
export const env = {
  // NextAuth
  NEXTAUTH_URL: parseEnvValue(process.env.NEXTAUTH_URL) || 'http://localhost:3000',
  NEXTAUTH_SECRET: parseEnvValue(process.env.NEXTAUTH_SECRET) || '',
  NODE_ENV: parseEnvValue(process.env.NODE_ENV) || 'development',

  // OpenAI
  OPENAI_API_KEY: parseEnvValue(process.env.OPENAI_API_KEY) || '',

  // Scheduler Configuration
  // Strict comparison: only the literal string 'true' enables scheduling.
  SCHEDULER_ENABLED: parseEnvValue(process.env.SCHEDULER_ENABLED) === 'true',
  CSV_IMPORT_INTERVAL: parseEnvValue(process.env.CSV_IMPORT_INTERVAL) || '*/15 * * * *',
  IMPORT_PROCESSING_INTERVAL: parseEnvValue(process.env.IMPORT_PROCESSING_INTERVAL) || '*/5 * * * *',
  IMPORT_PROCESSING_BATCH_SIZE: parseIntWithDefault(process.env.IMPORT_PROCESSING_BATCH_SIZE, 50),
  SESSION_PROCESSING_INTERVAL: parseEnvValue(process.env.SESSION_PROCESSING_INTERVAL) || '0 * * * *',
  // 0 = unlimited sessions per .env.example — confirm consumers honor this.
  SESSION_PROCESSING_BATCH_SIZE: parseIntWithDefault(process.env.SESSION_PROCESSING_BATCH_SIZE, 0),
  SESSION_PROCESSING_CONCURRENCY: parseIntWithDefault(process.env.SESSION_PROCESSING_CONCURRENCY, 5),

  // Server
  PORT: parseIntWithDefault(process.env.PORT, 3000),
} as const;
|
||||
|
||||
/**
|
||||
* Validate required environment variables
|
||||
*/
|
||||
export function validateEnv(): { valid: boolean; errors: string[] } {
|
||||
const errors: string[] = [];
|
||||
|
||||
if (!env.NEXTAUTH_SECRET) {
|
||||
errors.push('NEXTAUTH_SECRET is required');
|
||||
}
|
||||
|
||||
if (!env.OPENAI_API_KEY && env.NODE_ENV === 'production') {
|
||||
errors.push('OPENAI_API_KEY is required in production');
|
||||
}
|
||||
|
||||
return {
|
||||
valid: errors.length === 0,
|
||||
errors,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Get scheduler configuration from environment variables
|
||||
*/
|
||||
export function getSchedulerConfig() {
|
||||
return {
|
||||
enabled: env.SCHEDULER_ENABLED,
|
||||
csvImport: {
|
||||
interval: env.CSV_IMPORT_INTERVAL,
|
||||
},
|
||||
importProcessing: {
|
||||
interval: env.IMPORT_PROCESSING_INTERVAL,
|
||||
batchSize: env.IMPORT_PROCESSING_BATCH_SIZE,
|
||||
},
|
||||
sessionProcessing: {
|
||||
interval: env.SESSION_PROCESSING_INTERVAL,
|
||||
batchSize: env.SESSION_PROCESSING_BATCH_SIZE,
|
||||
concurrency: env.SESSION_PROCESSING_CONCURRENCY,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Log environment configuration (safe for production)
|
||||
*/
|
||||
export function logEnvConfig(): void {
|
||||
console.log('[Environment] Configuration:');
|
||||
console.log(` NODE_ENV: ${env.NODE_ENV}`);
|
||||
console.log(` NEXTAUTH_URL: ${env.NEXTAUTH_URL}`);
|
||||
console.log(` SCHEDULER_ENABLED: ${env.SCHEDULER_ENABLED}`);
|
||||
console.log(` PORT: ${env.PORT}`);
|
||||
|
||||
if (env.SCHEDULER_ENABLED) {
|
||||
console.log(' Scheduler intervals:');
|
||||
console.log(` CSV Import: ${env.CSV_IMPORT_INTERVAL}`);
|
||||
console.log(` Import Processing: ${env.IMPORT_PROCESSING_INTERVAL}`);
|
||||
console.log(` Session Processing: ${env.SESSION_PROCESSING_INTERVAL}`);
|
||||
}
|
||||
}
|
||||
@ -1,357 +0,0 @@
|
||||
// SessionImport to Session processor
|
||||
import { PrismaClient, SentimentCategory, SessionCategory, ProcessingStage } from "@prisma/client";
|
||||
import { getSchedulerConfig } from "./env";
|
||||
import { fetchTranscriptContent, isValidTranscriptUrl } from "./transcriptFetcher";
|
||||
import { ProcessingStatusManager } from "./processingStatusManager";
|
||||
import cron from "node-cron";
|
||||
|
||||
const prisma = new PrismaClient();
|
||||
|
||||
/**
|
||||
* Parse European date format (DD.MM.YYYY HH:mm:ss) to JavaScript Date
|
||||
*/
|
||||
function parseEuropeanDate(dateStr: string): Date {
|
||||
if (!dateStr || typeof dateStr !== 'string') {
|
||||
throw new Error(`Invalid date string: ${dateStr}`);
|
||||
}
|
||||
|
||||
// Handle format: "DD.MM.YYYY HH:mm:ss"
|
||||
const [datePart, timePart] = dateStr.trim().split(' ');
|
||||
|
||||
if (!datePart || !timePart) {
|
||||
throw new Error(`Invalid date format: ${dateStr}. Expected format: DD.MM.YYYY HH:mm:ss`);
|
||||
}
|
||||
|
||||
const [day, month, year] = datePart.split('.');
|
||||
|
||||
if (!day || !month || !year) {
|
||||
throw new Error(`Invalid date part: ${datePart}. Expected format: DD.MM.YYYY`);
|
||||
}
|
||||
|
||||
// Convert to ISO format: YYYY-MM-DD HH:mm:ss
|
||||
const isoDateStr = `${year}-${month.padStart(2, '0')}-${day.padStart(2, '0')} ${timePart}`;
|
||||
const date = new Date(isoDateStr);
|
||||
|
||||
if (isNaN(date.getTime())) {
|
||||
throw new Error(`Failed to parse date: ${dateStr} -> ${isoDateStr}`);
|
||||
}
|
||||
|
||||
return date;
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper function to parse sentiment from raw string (fallback only)
|
||||
*/
|
||||
function parseFallbackSentiment(sentimentRaw: string | null): SentimentCategory | null {
|
||||
if (!sentimentRaw) return null;
|
||||
|
||||
const sentimentStr = sentimentRaw.toLowerCase();
|
||||
if (sentimentStr.includes('positive')) {
|
||||
return SentimentCategory.POSITIVE;
|
||||
} else if (sentimentStr.includes('negative')) {
|
||||
return SentimentCategory.NEGATIVE;
|
||||
} else {
|
||||
return SentimentCategory.NEUTRAL;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper function to parse boolean from raw string (fallback only)
|
||||
*/
|
||||
function parseFallbackBoolean(rawValue: string | null): boolean | null {
|
||||
if (!rawValue) return null;
|
||||
return ['true', '1', 'yes', 'escalated', 'forwarded'].includes(rawValue.toLowerCase());
|
||||
}
|
||||
|
||||
/**
 * Parse transcript content into Message records for one session.
 * Existing messages for the session are deleted first, so re-running on
 * the same session is idempotent. Messages are created sequentially so
 * the `order` counter matches line order in the transcript.
 * @param sessionId ID of the session the messages belong to
 * @param transcriptContent Full transcript text, one message per line
 */
async function parseTranscriptIntoMessages(sessionId: string, transcriptContent: string): Promise<void> {
  // Clear existing messages for this session
  await prisma.message.deleteMany({
    where: { sessionId }
  });

  // Split transcript into lines and parse each message
  const lines = transcriptContent.split('\n').filter(line => line.trim());
  let order = 0;

  for (const line of lines) {
    const trimmedLine = line.trim();
    if (!trimmedLine) continue;

    // Try to parse different formats:
    // Format 1: "User: message" or "Assistant: message"
    // Format 2: "[timestamp] User: message" or "[timestamp] Assistant: message"

    let role = 'unknown';
    let content = trimmedLine;
    let timestamp: Date | null = null;

    // Check for timestamp format: [DD.MM.YYYY HH:mm:ss] Role: content
    const timestampMatch = trimmedLine.match(/^\[([^\]]+)\]\s*(.+)$/);
    if (timestampMatch) {
      try {
        timestamp = parseEuropeanDate(timestampMatch[1]);
        content = timestampMatch[2];
      } catch (error) {
        // If timestamp parsing fails, treat the whole line as content
        content = trimmedLine;
      }
    }

    // Extract role and message content (case-insensitive prefix match;
    // role is stored lowercased: 'user' | 'assistant' | 'system').
    const roleMatch = content.match(/^(User|Assistant|System):\s*(.*)$/i);
    if (roleMatch) {
      role = roleMatch[1].toLowerCase();
      content = roleMatch[2].trim();
    } else {
      // If no role prefix found, try to infer from context or use 'unknown'
      role = 'unknown';
    }

    // Skip empty content
    if (!content) continue;

    // Create message record (one insert per line; sequential awaits keep
    // `order` values consistent with transcript order)
    await prisma.message.create({
      data: {
        sessionId,
        timestamp,
        role,
        content,
        order,
      },
    });

    order++;
  }

  console.log(`[Import Processor] ✓ Parsed ${order} messages for session ${sessionId}`);
}
|
||||
|
||||
/**
 * Process a single SessionImport record into a Session record.
 * Uses new unified processing status tracking.
 *
 * Pipeline per record:
 *   1. parse start/end timestamps (European DD.MM.YYYY format)
 *   2. upsert the Session row keyed by importId (minimal field copies)
 *   3. mark CSV_IMPORT complete, then fetch the transcript if needed
 *   4. parse the transcript into Message rows (SESSION_CREATION stage)
 *
 * @param importRecord SessionImport row (typed `any`; fields used:
 *        id, companyId, externalSessionId, startTimeRaw, endTimeRaw,
 *        ipAddress, countryCode, fullTranscriptUrl,
 *        avgResponseTimeSeconds, initialMessage, rawTranscriptContent)
 * @returns `{ success: true }` or `{ success: false, error }` — never throws
 */
async function processSingleImport(importRecord: any): Promise<{ success: boolean; error?: string }> {
  // Tracks the upserted session so the catch block can attribute failures.
  let sessionId: string | null = null;

  try {
    // Parse dates using European format parser (throws on bad input,
    // before any DB writes happen).
    const startTime = parseEuropeanDate(importRecord.startTimeRaw);
    const endTime = parseEuropeanDate(importRecord.endTimeRaw);

    console.log(`[Import Processor] Processing ${importRecord.externalSessionId}: ${startTime.toISOString()} - ${endTime.toISOString()}`);

    // Create or update Session record with MINIMAL processing
    const session = await prisma.session.upsert({
      where: {
        importId: importRecord.id,
      },
      update: {
        startTime,
        endTime,
        // Direct copies (minimal processing)
        ipAddress: importRecord.ipAddress,
        country: importRecord.countryCode, // Keep as country code
        fullTranscriptUrl: importRecord.fullTranscriptUrl,
        avgResponseTime: importRecord.avgResponseTimeSeconds,
        initialMsg: importRecord.initialMessage,
      },
      create: {
        companyId: importRecord.companyId,
        importId: importRecord.id,
        startTime,
        endTime,
        // Direct copies (minimal processing)
        ipAddress: importRecord.ipAddress,
        country: importRecord.countryCode, // Keep as country code
        fullTranscriptUrl: importRecord.fullTranscriptUrl,
        avgResponseTime: importRecord.avgResponseTimeSeconds,
        initialMsg: importRecord.initialMessage,
      },
    });

    sessionId = session.id;

    // Initialize processing status for this session
    await ProcessingStatusManager.initializeSession(sessionId);

    // Mark CSV_IMPORT as completed
    await ProcessingStatusManager.completeStage(sessionId, ProcessingStage.CSV_IMPORT);

    // Handle transcript fetching
    let transcriptContent = importRecord.rawTranscriptContent;

    if (!transcriptContent && importRecord.fullTranscriptUrl && isValidTranscriptUrl(importRecord.fullTranscriptUrl)) {
      await ProcessingStatusManager.startStage(sessionId, ProcessingStage.TRANSCRIPT_FETCH);

      console.log(`[Import Processor] Fetching transcript for ${importRecord.externalSessionId}...`);

      // Get company credentials for transcript fetching
      const company = await prisma.company.findUnique({
        where: { id: importRecord.companyId },
        select: { csvUsername: true, csvPassword: true },
      });

      const transcriptResult = await fetchTranscriptContent(
        importRecord.fullTranscriptUrl,
        company?.csvUsername || undefined,
        company?.csvPassword || undefined
      );

      if (transcriptResult.success) {
        transcriptContent = transcriptResult.content;
        console.log(`[Import Processor] ✓ Fetched transcript for ${importRecord.externalSessionId} (${transcriptContent?.length} chars)`);

        // Update the import record with the fetched content
        // (caches the transcript so later runs skip re-fetching)
        await prisma.sessionImport.update({
          where: { id: importRecord.id },
          data: { rawTranscriptContent: transcriptContent },
        });

        await ProcessingStatusManager.completeStage(sessionId, ProcessingStage.TRANSCRIPT_FETCH, {
          contentLength: transcriptContent?.length || 0,
          url: importRecord.fullTranscriptUrl
        });
      } else {
        console.log(`[Import Processor] ⚠️ Failed to fetch transcript for ${importRecord.externalSessionId}: ${transcriptResult.error}`);
        await ProcessingStatusManager.failStage(sessionId, ProcessingStage.TRANSCRIPT_FETCH, transcriptResult.error || 'Unknown error');
      }
    } else if (!importRecord.fullTranscriptUrl) {
      // No transcript URL available - skip this stage
      await ProcessingStatusManager.skipStage(sessionId, ProcessingStage.TRANSCRIPT_FETCH, 'No transcript URL provided');
    } else {
      // Transcript already fetched
      // NOTE(review): this branch also fires when a URL exists but fails
      // isValidTranscriptUrl and nothing was cached — it is then marked
      // completed with source 'already_fetched'; confirm that is intended.
      await ProcessingStatusManager.completeStage(sessionId, ProcessingStage.TRANSCRIPT_FETCH, {
        contentLength: transcriptContent?.length || 0,
        source: 'already_fetched'
      });
    }

    // Handle session creation (parse messages)
    await ProcessingStatusManager.startStage(sessionId, ProcessingStage.SESSION_CREATION);

    if (transcriptContent) {
      await parseTranscriptIntoMessages(sessionId, transcriptContent);
    }

    await ProcessingStatusManager.completeStage(sessionId, ProcessingStage.SESSION_CREATION, {
      hasTranscript: !!transcriptContent,
      transcriptLength: transcriptContent?.length || 0
    });

    return { success: true };
  } catch (error) {
    const errorMessage = error instanceof Error ? error.message : String(error);

    // Mark the current stage as failed if we have a sessionId
    if (sessionId) {
      // Determine which stage failed based on the error
      // (heuristic keyword match on the error message)
      if (errorMessage.includes('transcript') || errorMessage.includes('fetch')) {
        await ProcessingStatusManager.failStage(sessionId, ProcessingStage.TRANSCRIPT_FETCH, errorMessage);
      } else if (errorMessage.includes('message') || errorMessage.includes('parse')) {
        await ProcessingStatusManager.failStage(sessionId, ProcessingStage.SESSION_CREATION, errorMessage);
      } else {
        // General failure - mark CSV_IMPORT as failed
        await ProcessingStatusManager.failStage(sessionId, ProcessingStage.CSV_IMPORT, errorMessage);
      }
    }

    return {
      success: false,
      error: errorMessage,
    };
  }
}
|
||||
|
||||
/**
 * Process unprocessed SessionImport records into Session records.
 * Uses new processing status system to find imports that need processing.
 * Repeatedly pulls batches of imports that have no linked Session yet
 * (oldest first) until a short batch signals the backlog is drained.
 *
 * NOTE(review): an import that fails BEFORE its Session upsert (e.g. a
 * date-parse error) still has `session: null` and will be re-selected by
 * the next query; a full batch of such permanently failing imports would
 * loop within this run — confirm whether a cursor/skip is needed.
 *
 * @param batchSize Maximum imports fetched and processed per batch
 */
export async function processQueuedImports(batchSize: number = 50): Promise<void> {
  console.log('[Import Processor] Starting to process unprocessed imports...');

  let totalSuccessCount = 0;
  let totalErrorCount = 0;
  let batchNumber = 1;

  while (true) {
    // Find SessionImports that don't have a corresponding Session yet
    const unprocessedImports = await prisma.sessionImport.findMany({
      where: {
        session: null, // No session created yet
      },
      take: batchSize,
      orderBy: {
        createdAt: 'asc', // Process oldest first
      },
    });

    if (unprocessedImports.length === 0) {
      if (batchNumber === 1) {
        console.log('[Import Processor] No unprocessed imports found');
      } else {
        console.log(`[Import Processor] All batches completed. Total: ${totalSuccessCount} successful, ${totalErrorCount} failed`);
      }
      return;
    }

    console.log(`[Import Processor] Processing batch ${batchNumber}: ${unprocessedImports.length} imports...`);

    let batchSuccessCount = 0;
    let batchErrorCount = 0;

    // Process each import in this batch (sequentially; processSingleImport
    // never throws — it reports failure via its result object)
    for (const importRecord of unprocessedImports) {
      const result = await processSingleImport(importRecord);

      if (result.success) {
        batchSuccessCount++;
        totalSuccessCount++;
        console.log(`[Import Processor] ✓ Processed import ${importRecord.externalSessionId}`);
      } else {
        batchErrorCount++;
        totalErrorCount++;
        console.log(`[Import Processor] ✗ Failed to process import ${importRecord.externalSessionId}: ${result.error}`);
      }
    }

    console.log(`[Import Processor] Batch ${batchNumber} completed: ${batchSuccessCount} successful, ${batchErrorCount} failed`);
    batchNumber++;

    // If this batch was smaller than the batch size, we're done
    if (unprocessedImports.length < batchSize) {
      console.log(`[Import Processor] All batches completed. Total: ${totalSuccessCount} successful, ${totalErrorCount} failed`);
      return;
    }
  }
}
|
||||
|
||||
/**
|
||||
* Start the import processing scheduler
|
||||
*/
|
||||
export function startImportProcessingScheduler(): void {
|
||||
const config = getSchedulerConfig();
|
||||
|
||||
if (!config.enabled) {
|
||||
console.log('[Import Processing Scheduler] Disabled via configuration');
|
||||
return;
|
||||
}
|
||||
|
||||
// Use a more frequent interval for import processing (every 5 minutes by default)
|
||||
const interval = process.env.IMPORT_PROCESSING_INTERVAL || '*/5 * * * *';
|
||||
const batchSize = parseInt(process.env.IMPORT_PROCESSING_BATCH_SIZE || '50', 10);
|
||||
|
||||
console.log(`[Import Processing Scheduler] Starting with interval: ${interval}`);
|
||||
console.log(`[Import Processing Scheduler] Batch size: ${batchSize}`);
|
||||
|
||||
cron.schedule(interval, async () => {
|
||||
try {
|
||||
await processQueuedImports(batchSize);
|
||||
} catch (error) {
|
||||
console.error(`[Import Processing Scheduler] Error: ${error}`);
|
||||
}
|
||||
});
|
||||
}
|
||||
112
lib/metrics.ts
112
lib/metrics.ts
@ -7,7 +7,6 @@ import {
|
||||
CountryMetrics, // Added CountryMetrics
|
||||
MetricsResult,
|
||||
WordCloudWord, // Added WordCloudWord
|
||||
TopQuestion, // Added TopQuestion
|
||||
} from "./types";
|
||||
|
||||
interface CompanyConfig {
|
||||
@ -345,28 +344,12 @@ export function sessionMetrics(
|
||||
let sentimentPositiveCount = 0;
|
||||
let sentimentNeutralCount = 0;
|
||||
let sentimentNegativeCount = 0;
|
||||
const totalTokens = 0;
|
||||
const totalTokensEur = 0;
|
||||
let totalTokens = 0;
|
||||
let totalTokensEur = 0;
|
||||
const wordCounts: { [key: string]: number } = {};
|
||||
let alerts = 0;
|
||||
|
||||
// New metrics variables
|
||||
const hourlySessionCounts: { [hour: string]: number } = {};
|
||||
let resolvedChatsCount = 0;
|
||||
const questionCounts: { [question: string]: number } = {};
|
||||
|
||||
for (const session of sessions) {
|
||||
// Track hourly usage for peak time calculation
|
||||
if (session.startTime) {
|
||||
const hour = new Date(session.startTime).getHours();
|
||||
const hourKey = `${hour.toString().padStart(2, '0')}:00`;
|
||||
hourlySessionCounts[hourKey] = (hourlySessionCounts[hourKey] || 0) + 1;
|
||||
}
|
||||
|
||||
// Count resolved chats (sessions that have ended and are not escalated)
|
||||
if (session.endTime && !session.escalated) {
|
||||
resolvedChatsCount++;
|
||||
}
|
||||
// Unique Users: Prefer non-empty ipAddress, fallback to non-empty sessionId
|
||||
let identifierAdded = false;
|
||||
if (session.ipAddress && session.ipAddress.trim() !== "") {
|
||||
@ -453,25 +436,41 @@ export function sessionMetrics(
|
||||
if (session.escalated) escalatedCount++;
|
||||
if (session.forwardedHr) forwardedHrCount++;
|
||||
|
||||
// Sentiment (now using enum values)
|
||||
// Sentiment
|
||||
if (session.sentiment !== undefined && session.sentiment !== null) {
|
||||
if (session.sentiment === "POSITIVE") sentimentPositiveCount++;
|
||||
else if (session.sentiment === "NEGATIVE") sentimentNegativeCount++;
|
||||
else if (session.sentiment === "NEUTRAL") sentimentNeutralCount++;
|
||||
// Example thresholds, adjust as needed
|
||||
if (session.sentiment > 0.3) sentimentPositiveCount++;
|
||||
else if (session.sentiment < -0.3) sentimentNegativeCount++;
|
||||
else sentimentNeutralCount++;
|
||||
}
|
||||
|
||||
// Sentiment Alert Check (simplified for enum)
|
||||
// Sentiment Alert Check
|
||||
if (
|
||||
companyConfig.sentimentAlert !== undefined &&
|
||||
session.sentiment === "NEGATIVE"
|
||||
session.sentiment !== undefined &&
|
||||
session.sentiment !== null &&
|
||||
session.sentiment < companyConfig.sentimentAlert
|
||||
) {
|
||||
alerts++;
|
||||
}
|
||||
|
||||
// Tokens
|
||||
if (session.tokens !== undefined && session.tokens !== null) {
|
||||
totalTokens += session.tokens;
|
||||
}
|
||||
if (session.tokensEur !== undefined && session.tokensEur !== null) {
|
||||
totalTokensEur += session.tokensEur;
|
||||
}
|
||||
|
||||
// Daily metrics
|
||||
const day = new Date(session.startTime).toISOString().split("T")[0];
|
||||
byDay[day] = (byDay[day] || 0) + 1; // Sessions per day
|
||||
// Note: tokens and tokensEur are not available in the new schema
|
||||
if (session.tokens !== undefined && session.tokens !== null) {
|
||||
tokensByDay[day] = (tokensByDay[day] || 0) + session.tokens;
|
||||
}
|
||||
if (session.tokensEur !== undefined && session.tokensEur !== null) {
|
||||
tokensCostByDay[day] = (tokensCostByDay[day] || 0) + session.tokensEur;
|
||||
}
|
||||
|
||||
// Category metrics
|
||||
if (session.category) {
|
||||
@ -488,34 +487,6 @@ export function sessionMetrics(
|
||||
byCountry[session.country] = (byCountry[session.country] || 0) + 1;
|
||||
}
|
||||
|
||||
// Extract questions from session
|
||||
const extractQuestions = () => {
|
||||
// 1. Extract questions from user messages (if available)
|
||||
if (session.messages) {
|
||||
session.messages
|
||||
.filter(msg => msg.role === 'User')
|
||||
.forEach(msg => {
|
||||
const content = msg.content.trim();
|
||||
// Simple heuristic: if message ends with ? or contains question words, treat as question
|
||||
if (content.endsWith('?') ||
|
||||
/\b(what|when|where|why|how|who|which|can|could|would|will|is|are|do|does|did)\b/i.test(content)) {
|
||||
questionCounts[content] = (questionCounts[content] || 0) + 1;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// 3. Extract questions from initial message as fallback
|
||||
if (session.initialMsg) {
|
||||
const content = session.initialMsg.trim();
|
||||
if (content.endsWith('?') ||
|
||||
/\b(what|when|where|why|how|who|which|can|could|would|will|is|are|do|does|did)\b/i.test(content)) {
|
||||
questionCounts[content] = (questionCounts[content] || 0) + 1;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
extractQuestions();
|
||||
|
||||
// Word Cloud Data (from initial message and transcript content)
|
||||
const processTextForWordCloud = (text: string | undefined | null) => {
|
||||
if (!text) return;
|
||||
@ -535,8 +506,7 @@ export function sessionMetrics(
|
||||
}
|
||||
};
|
||||
processTextForWordCloud(session.initialMsg);
|
||||
// Note: transcriptContent is not available in ChatSession type
|
||||
// Could be added later if transcript parsing is implemented
|
||||
processTextForWordCloud(session.transcriptContent);
|
||||
}
|
||||
|
||||
const uniqueUsers = uniqueUserIds.size;
|
||||
@ -577,30 +547,6 @@ export function sessionMetrics(
|
||||
mockPreviousPeriodData.avgResponseTime
|
||||
);
|
||||
|
||||
// Calculate new metrics
|
||||
|
||||
// 1. Average Daily Costs (euros)
|
||||
const avgDailyCosts = numDaysWithSessions > 0 ? totalTokensEur / numDaysWithSessions : 0;
|
||||
|
||||
// 2. Peak Usage Time
|
||||
let peakUsageTime = "N/A";
|
||||
if (Object.keys(hourlySessionCounts).length > 0) {
|
||||
const peakHour = Object.entries(hourlySessionCounts)
|
||||
.sort(([, a], [, b]) => b - a)[0][0];
|
||||
const peakHourNum = parseInt(peakHour.split(':')[0]);
|
||||
const endHour = (peakHourNum + 1) % 24;
|
||||
peakUsageTime = `${peakHour}-${endHour.toString().padStart(2, '0')}:00`;
|
||||
}
|
||||
|
||||
// 3. Resolved Chats Percentage
|
||||
const resolvedChatsPercentage = totalSessions > 0 ? (resolvedChatsCount / totalSessions) * 100 : 0;
|
||||
|
||||
// 4. Top 5 Asked Questions
|
||||
const topQuestions: TopQuestion[] = Object.entries(questionCounts)
|
||||
.sort(([, a], [, b]) => b - a)
|
||||
.slice(0, 5) // Top 5 questions
|
||||
.map(([question, count]) => ({ question, count }));
|
||||
|
||||
// console.log("Debug metrics calculation:", {
|
||||
// totalSessionDuration,
|
||||
// validSessionsForDuration,
|
||||
@ -639,11 +585,5 @@ export function sessionMetrics(
|
||||
lastUpdated: Date.now(),
|
||||
totalSessionDuration,
|
||||
validSessionsForDuration,
|
||||
|
||||
// New metrics
|
||||
avgDailyCosts,
|
||||
peakUsageTime,
|
||||
resolvedChatsPercentage,
|
||||
topQuestions,
|
||||
};
|
||||
}
|
||||
|
||||
@ -1,5 +1,6 @@
|
||||
// Simple Prisma client setup
|
||||
// Prisma client setup with support for Cloudflare D1
|
||||
import { PrismaClient } from "@prisma/client";
|
||||
import { PrismaD1 } from "@prisma/adapter-d1";
|
||||
|
||||
// Add prisma to the NodeJS global type
|
||||
// This approach avoids NodeJS.Global which is not available
|
||||
@ -9,12 +10,24 @@ declare const global: {
|
||||
prisma: PrismaClient | undefined;
|
||||
};
|
||||
|
||||
// Initialize Prisma Client
|
||||
const prisma = global.prisma || new PrismaClient();
|
||||
// Check if we're running in Cloudflare Workers environment
|
||||
const isCloudflareWorker = typeof globalThis.DB !== 'undefined';
|
||||
|
||||
// Save in global if we're in development
|
||||
if (process.env.NODE_ENV !== "production") {
|
||||
global.prisma = prisma;
|
||||
// Initialize Prisma Client
|
||||
let prisma: PrismaClient;
|
||||
|
||||
if (isCloudflareWorker) {
|
||||
// In Cloudflare Workers, use D1 adapter
|
||||
const adapter = new PrismaD1(globalThis.DB);
|
||||
prisma = new PrismaClient({ adapter });
|
||||
} else {
|
||||
// In Next.js/Node.js, use regular SQLite
|
||||
prisma = global.prisma || new PrismaClient();
|
||||
|
||||
// Save in global if we're in development
|
||||
if (process.env.NODE_ENV !== "production") {
|
||||
global.prisma = prisma;
|
||||
}
|
||||
}
|
||||
|
||||
export { prisma };
|
||||
|
||||
@ -1,620 +0,0 @@
|
||||
// Enhanced session processing scheduler with AI cost tracking and question management
|
||||
import cron from "node-cron";
|
||||
import { PrismaClient, SentimentCategory, SessionCategory, ProcessingStage } from "@prisma/client";
|
||||
import fetch from "node-fetch";
|
||||
import { getSchedulerConfig } from "./schedulerConfig";
|
||||
import { ProcessingStatusManager } from "./processingStatusManager";
|
||||
|
||||
const prisma = new PrismaClient();
|
||||
const OPENAI_API_KEY = process.env.OPENAI_API_KEY;
|
||||
const OPENAI_API_URL = "https://api.openai.com/v1/chat/completions";
|
||||
const DEFAULT_MODEL = process.env.OPENAI_MODEL || "gpt-4o";
|
||||
|
||||
const USD_TO_EUR_RATE = 0.85; // Update periodically or fetch from API
|
||||
|
||||
/**
|
||||
* Get company's default AI model
|
||||
*/
|
||||
async function getCompanyAIModel(companyId: string): Promise<string> {
|
||||
const companyModel = await prisma.companyAIModel.findFirst({
|
||||
where: {
|
||||
companyId,
|
||||
isDefault: true,
|
||||
},
|
||||
include: {
|
||||
aiModel: true,
|
||||
},
|
||||
});
|
||||
|
||||
return companyModel?.aiModel.name || DEFAULT_MODEL;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get current pricing for an AI model
|
||||
*/
|
||||
async function getCurrentModelPricing(modelName: string): Promise<{
|
||||
promptTokenCost: number;
|
||||
completionTokenCost: number;
|
||||
} | null> {
|
||||
const model = await prisma.aIModel.findUnique({
|
||||
where: { name: modelName },
|
||||
include: {
|
||||
pricing: {
|
||||
where: {
|
||||
effectiveFrom: { lte: new Date() },
|
||||
OR: [
|
||||
{ effectiveUntil: null },
|
||||
{ effectiveUntil: { gte: new Date() } }
|
||||
]
|
||||
},
|
||||
orderBy: { effectiveFrom: 'desc' },
|
||||
take: 1,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
if (!model || model.pricing.length === 0) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const pricing = model.pricing[0];
|
||||
return {
|
||||
promptTokenCost: pricing.promptTokenCost,
|
||||
completionTokenCost: pricing.completionTokenCost,
|
||||
};
|
||||
}
|
||||
|
||||
interface ProcessedData {
|
||||
language: string;
|
||||
sentiment: "POSITIVE" | "NEUTRAL" | "NEGATIVE";
|
||||
escalated: boolean;
|
||||
forwarded_hr: boolean;
|
||||
category: "SCHEDULE_HOURS" | "LEAVE_VACATION" | "SICK_LEAVE_RECOVERY" | "SALARY_COMPENSATION" | "CONTRACT_HOURS" | "ONBOARDING" | "OFFBOARDING" | "WORKWEAR_STAFF_PASS" | "TEAM_CONTACTS" | "PERSONAL_QUESTIONS" | "ACCESS_LOGIN" | "SOCIAL_QUESTIONS" | "UNRECOGNIZED_OTHER";
|
||||
questions: string[];
|
||||
summary: string;
|
||||
session_id: string;
|
||||
}
|
||||
|
||||
interface ProcessingResult {
|
||||
sessionId: string;
|
||||
success: boolean;
|
||||
error?: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Record AI processing request with detailed token tracking
|
||||
*/
|
||||
async function recordAIProcessingRequest(
|
||||
sessionId: string,
|
||||
openaiResponse: any,
|
||||
processingType: string = 'session_analysis'
|
||||
): Promise<void> {
|
||||
const usage = openaiResponse.usage;
|
||||
const model = openaiResponse.model;
|
||||
|
||||
// Get current pricing from database
|
||||
const pricing = await getCurrentModelPricing(model);
|
||||
|
||||
// Fallback pricing if not found in database
|
||||
const fallbackPricing = {
|
||||
promptTokenCost: 0.00001, // $10.00 per 1M tokens (gpt-4-turbo rate)
|
||||
completionTokenCost: 0.00003, // $30.00 per 1M tokens
|
||||
};
|
||||
|
||||
const finalPricing = pricing || fallbackPricing;
|
||||
|
||||
const promptCost = usage.prompt_tokens * finalPricing.promptTokenCost;
|
||||
const completionCost = usage.completion_tokens * finalPricing.completionTokenCost;
|
||||
const totalCostUsd = promptCost + completionCost;
|
||||
const totalCostEur = totalCostUsd * USD_TO_EUR_RATE;
|
||||
|
||||
await prisma.aIProcessingRequest.create({
|
||||
data: {
|
||||
sessionId,
|
||||
openaiRequestId: openaiResponse.id,
|
||||
model: openaiResponse.model,
|
||||
serviceTier: openaiResponse.service_tier,
|
||||
systemFingerprint: openaiResponse.system_fingerprint,
|
||||
|
||||
promptTokens: usage.prompt_tokens,
|
||||
completionTokens: usage.completion_tokens,
|
||||
totalTokens: usage.total_tokens,
|
||||
|
||||
// Detailed breakdown
|
||||
cachedTokens: usage.prompt_tokens_details?.cached_tokens || null,
|
||||
audioTokensPrompt: usage.prompt_tokens_details?.audio_tokens || null,
|
||||
reasoningTokens: usage.completion_tokens_details?.reasoning_tokens || null,
|
||||
audioTokensCompletion: usage.completion_tokens_details?.audio_tokens || null,
|
||||
acceptedPredictionTokens: usage.completion_tokens_details?.accepted_prediction_tokens || null,
|
||||
rejectedPredictionTokens: usage.completion_tokens_details?.rejected_prediction_tokens || null,
|
||||
|
||||
promptTokenCost: finalPricing.promptTokenCost,
|
||||
completionTokenCost: finalPricing.completionTokenCost,
|
||||
totalCostEur,
|
||||
|
||||
processingType,
|
||||
success: true,
|
||||
completedAt: new Date(),
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Record failed AI processing request
|
||||
*/
|
||||
async function recordFailedAIProcessingRequest(
|
||||
sessionId: string,
|
||||
processingType: string,
|
||||
errorMessage: string
|
||||
): Promise<void> {
|
||||
await prisma.aIProcessingRequest.create({
|
||||
data: {
|
||||
sessionId,
|
||||
model: 'unknown',
|
||||
promptTokens: 0,
|
||||
completionTokens: 0,
|
||||
totalTokens: 0,
|
||||
promptTokenCost: 0,
|
||||
completionTokenCost: 0,
|
||||
totalCostEur: 0,
|
||||
processingType,
|
||||
success: false,
|
||||
errorMessage,
|
||||
completedAt: new Date(),
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Process questions into separate Question and SessionQuestion tables
|
||||
*/
|
||||
async function processQuestions(sessionId: string, questions: string[]): Promise<void> {
|
||||
// Clear existing questions for this session
|
||||
await prisma.sessionQuestion.deleteMany({
|
||||
where: { sessionId }
|
||||
});
|
||||
|
||||
// Process each question
|
||||
for (let index = 0; index < questions.length; index++) {
|
||||
const questionText = questions[index];
|
||||
if (!questionText.trim()) continue; // Skip empty questions
|
||||
|
||||
// Find or create question
|
||||
const question = await prisma.question.upsert({
|
||||
where: { content: questionText.trim() },
|
||||
create: { content: questionText.trim() },
|
||||
update: {}
|
||||
});
|
||||
|
||||
// Link to session
|
||||
await prisma.sessionQuestion.create({
|
||||
data: {
|
||||
sessionId,
|
||||
questionId: question.id,
|
||||
order: index
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate messagesSent from actual Message records
|
||||
*/
|
||||
async function calculateMessagesSent(sessionId: string): Promise<number> {
|
||||
const userMessageCount = await prisma.message.count({
|
||||
where: {
|
||||
sessionId,
|
||||
role: { in: ['user', 'User'] } // Handle both cases
|
||||
}
|
||||
});
|
||||
return userMessageCount;
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate endTime from latest Message timestamp
|
||||
*/
|
||||
async function calculateEndTime(sessionId: string, fallbackEndTime: Date): Promise<Date> {
|
||||
const latestMessage = await prisma.message.findFirst({
|
||||
where: { sessionId },
|
||||
orderBy: { timestamp: 'desc' }
|
||||
});
|
||||
|
||||
return latestMessage?.timestamp || fallbackEndTime;
|
||||
}
|
||||
|
||||
/**
|
||||
* Processes a session transcript using OpenAI API
|
||||
*/
|
||||
async function processTranscriptWithOpenAI(sessionId: string, transcript: string, companyId: string): Promise<ProcessedData> {
|
||||
if (!OPENAI_API_KEY) {
|
||||
throw new Error("OPENAI_API_KEY environment variable is not set");
|
||||
}
|
||||
|
||||
// Get company's AI model
|
||||
const aiModel = await getCompanyAIModel(companyId);
|
||||
|
||||
// Updated system message with exact enum values
|
||||
const systemMessage = `
|
||||
You are an AI assistant tasked with analyzing chat transcripts.
|
||||
Extract the following information from the transcript and return it in EXACT JSON format:
|
||||
|
||||
{
|
||||
"language": "ISO 639-1 code (e.g., 'en', 'nl', 'de')",
|
||||
"sentiment": "POSITIVE|NEUTRAL|NEGATIVE",
|
||||
"escalated": boolean,
|
||||
"forwarded_hr": boolean,
|
||||
"category": "SCHEDULE_HOURS|LEAVE_VACATION|SICK_LEAVE_RECOVERY|SALARY_COMPENSATION|CONTRACT_HOURS|ONBOARDING|OFFBOARDING|WORKWEAR_STAFF_PASS|TEAM_CONTACTS|PERSONAL_QUESTIONS|ACCESS_LOGIN|SOCIAL_QUESTIONS|UNRECOGNIZED_OTHER",
|
||||
"questions": ["question 1", "question 2", ...],
|
||||
"summary": "brief summary (10-300 chars)",
|
||||
"session_id": "${sessionId}"
|
||||
}
|
||||
|
||||
Rules:
|
||||
- language: Primary language used by the user (ISO 639-1 code)
|
||||
- sentiment: Overall emotional tone of the conversation
|
||||
- escalated: Was the issue escalated to a supervisor/manager?
|
||||
- forwarded_hr: Was HR contact mentioned or provided?
|
||||
- category: Best fitting category for the main topic (use exact enum values above)
|
||||
- questions: Up to 5 paraphrased user questions (in English)
|
||||
- summary: Brief conversation summary (10-300 characters)
|
||||
|
||||
IMPORTANT: Use EXACT enum values as specified above.
|
||||
`;
|
||||
|
||||
try {
|
||||
const response = await fetch(OPENAI_API_URL, {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
Authorization: `Bearer ${OPENAI_API_KEY}`,
|
||||
},
|
||||
body: JSON.stringify({
|
||||
model: aiModel, // Use company's configured AI model
|
||||
messages: [
|
||||
{
|
||||
role: "system",
|
||||
content: systemMessage,
|
||||
},
|
||||
{
|
||||
role: "user",
|
||||
content: transcript,
|
||||
},
|
||||
],
|
||||
temperature: 0.3, // Lower temperature for more consistent results
|
||||
response_format: { type: "json_object" },
|
||||
}),
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
const errorText = await response.text();
|
||||
throw new Error(`OpenAI API error: ${response.status} - ${errorText}`);
|
||||
}
|
||||
|
||||
const openaiResponse: any = await response.json();
|
||||
|
||||
// Record the AI processing request for cost tracking
|
||||
await recordAIProcessingRequest(sessionId, openaiResponse, 'session_analysis');
|
||||
|
||||
const processedData = JSON.parse(openaiResponse.choices[0].message.content);
|
||||
|
||||
// Validate the response against our expected schema
|
||||
validateOpenAIResponse(processedData);
|
||||
|
||||
return processedData;
|
||||
} catch (error) {
|
||||
// Record failed request
|
||||
await recordFailedAIProcessingRequest(
|
||||
sessionId,
|
||||
'session_analysis',
|
||||
error instanceof Error ? error.message : String(error)
|
||||
);
|
||||
|
||||
process.stderr.write(`Error processing transcript with OpenAI: ${error}\n`);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Validates the OpenAI response against our expected schema
|
||||
*/
|
||||
function validateOpenAIResponse(data: any): void {
|
||||
const requiredFields = [
|
||||
"language", "sentiment", "escalated", "forwarded_hr",
|
||||
"category", "questions", "summary", "session_id"
|
||||
];
|
||||
|
||||
for (const field of requiredFields) {
|
||||
if (!(field in data)) {
|
||||
throw new Error(`Missing required field: ${field}`);
|
||||
}
|
||||
}
|
||||
|
||||
// Validate field types and values
|
||||
if (typeof data.language !== "string" || !/^[a-z]{2}$/.test(data.language)) {
|
||||
throw new Error("Invalid language format. Expected ISO 639-1 code (e.g., 'en')");
|
||||
}
|
||||
|
||||
if (!["POSITIVE", "NEUTRAL", "NEGATIVE"].includes(data.sentiment)) {
|
||||
throw new Error("Invalid sentiment. Expected 'POSITIVE', 'NEUTRAL', or 'NEGATIVE'");
|
||||
}
|
||||
|
||||
if (typeof data.escalated !== "boolean") {
|
||||
throw new Error("Invalid escalated. Expected boolean");
|
||||
}
|
||||
|
||||
if (typeof data.forwarded_hr !== "boolean") {
|
||||
throw new Error("Invalid forwarded_hr. Expected boolean");
|
||||
}
|
||||
|
||||
const validCategories = [
|
||||
"SCHEDULE_HOURS", "LEAVE_VACATION", "SICK_LEAVE_RECOVERY", "SALARY_COMPENSATION",
|
||||
"CONTRACT_HOURS", "ONBOARDING", "OFFBOARDING", "WORKWEAR_STAFF_PASS",
|
||||
"TEAM_CONTACTS", "PERSONAL_QUESTIONS", "ACCESS_LOGIN", "SOCIAL_QUESTIONS",
|
||||
"UNRECOGNIZED_OTHER"
|
||||
];
|
||||
|
||||
if (!validCategories.includes(data.category)) {
|
||||
throw new Error(`Invalid category. Expected one of: ${validCategories.join(", ")}`);
|
||||
}
|
||||
|
||||
if (!Array.isArray(data.questions)) {
|
||||
throw new Error("Invalid questions. Expected array of strings");
|
||||
}
|
||||
|
||||
if (typeof data.summary !== "string" || data.summary.length < 10 || data.summary.length > 300) {
|
||||
throw new Error("Invalid summary. Expected string between 10-300 characters");
|
||||
}
|
||||
|
||||
if (typeof data.session_id !== "string") {
|
||||
throw new Error("Invalid session_id. Expected string");
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Process a single session
|
||||
*/
|
||||
async function processSingleSession(session: any): Promise<ProcessingResult> {
|
||||
if (session.messages.length === 0) {
|
||||
return {
|
||||
sessionId: session.id,
|
||||
success: false,
|
||||
error: "Session has no messages",
|
||||
};
|
||||
}
|
||||
|
||||
try {
|
||||
// Mark AI analysis as started
|
||||
await ProcessingStatusManager.startStage(session.id, ProcessingStage.AI_ANALYSIS);
|
||||
|
||||
// Convert messages back to transcript format for OpenAI processing
|
||||
const transcript = session.messages
|
||||
.map((msg: any) =>
|
||||
`[${new Date(msg.timestamp)
|
||||
.toLocaleString("en-GB", {
|
||||
day: "2-digit",
|
||||
month: "2-digit",
|
||||
year: "numeric",
|
||||
hour: "2-digit",
|
||||
minute: "2-digit",
|
||||
second: "2-digit",
|
||||
})
|
||||
.replace(",", "")}] ${msg.role}: ${msg.content}`
|
||||
)
|
||||
.join("\n");
|
||||
|
||||
const processedData = await processTranscriptWithOpenAI(session.id, transcript, session.companyId);
|
||||
|
||||
// Calculate messagesSent from actual Message records
|
||||
const messagesSent = await calculateMessagesSent(session.id);
|
||||
|
||||
// Calculate endTime from latest Message timestamp
|
||||
const calculatedEndTime = await calculateEndTime(session.id, session.endTime);
|
||||
|
||||
// Update the session with processed data
|
||||
await prisma.session.update({
|
||||
where: { id: session.id },
|
||||
data: {
|
||||
language: processedData.language,
|
||||
messagesSent: messagesSent, // Calculated from Messages, not AI
|
||||
endTime: calculatedEndTime, // Use calculated endTime if different
|
||||
sentiment: processedData.sentiment as SentimentCategory,
|
||||
escalated: processedData.escalated,
|
||||
forwardedHr: processedData.forwarded_hr,
|
||||
category: processedData.category as SessionCategory,
|
||||
summary: processedData.summary,
|
||||
},
|
||||
});
|
||||
|
||||
// Mark AI analysis as completed
|
||||
await ProcessingStatusManager.completeStage(session.id, ProcessingStage.AI_ANALYSIS, {
|
||||
language: processedData.language,
|
||||
sentiment: processedData.sentiment,
|
||||
category: processedData.category,
|
||||
questionsCount: processedData.questions.length
|
||||
});
|
||||
|
||||
// Start question extraction stage
|
||||
await ProcessingStatusManager.startStage(session.id, ProcessingStage.QUESTION_EXTRACTION);
|
||||
|
||||
// Process questions into separate tables
|
||||
await processQuestions(session.id, processedData.questions);
|
||||
|
||||
// Mark question extraction as completed
|
||||
await ProcessingStatusManager.completeStage(session.id, ProcessingStage.QUESTION_EXTRACTION, {
|
||||
questionsProcessed: processedData.questions.length
|
||||
});
|
||||
|
||||
return {
|
||||
sessionId: session.id,
|
||||
success: true,
|
||||
};
|
||||
} catch (error) {
|
||||
// Mark AI analysis as failed
|
||||
await ProcessingStatusManager.failStage(
|
||||
session.id,
|
||||
ProcessingStage.AI_ANALYSIS,
|
||||
error instanceof Error ? error.message : String(error)
|
||||
);
|
||||
|
||||
return {
|
||||
sessionId: session.id,
|
||||
success: false,
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Process sessions in parallel with concurrency limit
|
||||
*/
|
||||
async function processSessionsInParallel(sessions: any[], maxConcurrency: number = 5): Promise<ProcessingResult[]> {
|
||||
const results: Promise<ProcessingResult>[] = [];
|
||||
const executing: Promise<ProcessingResult>[] = [];
|
||||
|
||||
for (const session of sessions) {
|
||||
const promise = processSingleSession(session).then((result) => {
|
||||
process.stdout.write(
|
||||
result.success
|
||||
? `[ProcessingScheduler] ✓ Successfully processed session ${result.sessionId}\n`
|
||||
: `[ProcessingScheduler] ✗ Failed to process session ${result.sessionId}: ${result.error}\n`
|
||||
);
|
||||
return result;
|
||||
});
|
||||
|
||||
results.push(promise);
|
||||
executing.push(promise);
|
||||
|
||||
if (executing.length >= maxConcurrency) {
|
||||
await Promise.race(executing);
|
||||
const completedIndex = executing.findIndex(p => p === promise);
|
||||
if (completedIndex !== -1) {
|
||||
executing.splice(completedIndex, 1);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return Promise.all(results);
|
||||
}
|
||||
|
||||
/**
|
||||
* Process unprocessed sessions using the new processing status system
|
||||
*/
|
||||
export async function processUnprocessedSessions(batchSize: number | null = null, maxConcurrency: number = 5): Promise<void> {
|
||||
process.stdout.write("[ProcessingScheduler] Starting to process sessions needing AI analysis...\n");
|
||||
|
||||
// Get sessions that need AI processing using the new status system
|
||||
const sessionsNeedingAI = await ProcessingStatusManager.getSessionsNeedingProcessing(
|
||||
ProcessingStage.AI_ANALYSIS,
|
||||
batchSize || 50
|
||||
);
|
||||
|
||||
if (sessionsNeedingAI.length === 0) {
|
||||
process.stdout.write("[ProcessingScheduler] No sessions found requiring AI processing.\n");
|
||||
return;
|
||||
}
|
||||
|
||||
// Get session IDs that need processing
|
||||
const sessionIds = sessionsNeedingAI.map(statusRecord => statusRecord.sessionId);
|
||||
|
||||
// Fetch full session data with messages
|
||||
const sessionsToProcess = await prisma.session.findMany({
|
||||
where: {
|
||||
id: { in: sessionIds }
|
||||
},
|
||||
include: {
|
||||
messages: {
|
||||
orderBy: { order: "asc" },
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
// Filter to only sessions that have messages
|
||||
const sessionsWithMessages = sessionsToProcess.filter(
|
||||
(session: any) => session.messages && session.messages.length > 0
|
||||
);
|
||||
|
||||
if (sessionsWithMessages.length === 0) {
|
||||
process.stdout.write("[ProcessingScheduler] No sessions with messages found requiring processing.\n");
|
||||
return;
|
||||
}
|
||||
|
||||
process.stdout.write(
|
||||
`[ProcessingScheduler] Found ${sessionsWithMessages.length} sessions to process (max concurrency: ${maxConcurrency}).\n`
|
||||
);
|
||||
|
||||
const startTime = Date.now();
|
||||
const results = await processSessionsInParallel(sessionsWithMessages, maxConcurrency);
|
||||
const endTime = Date.now();
|
||||
|
||||
const successCount = results.filter((r) => r.success).length;
|
||||
const errorCount = results.filter((r) => !r.success).length;
|
||||
|
||||
process.stdout.write("[ProcessingScheduler] Session processing complete.\n");
|
||||
process.stdout.write(`[ProcessingScheduler] Successfully processed: ${successCount} sessions.\n`);
|
||||
process.stdout.write(`[ProcessingScheduler] Failed to process: ${errorCount} sessions.\n`);
|
||||
process.stdout.write(`[ProcessingScheduler] Total processing time: ${((endTime - startTime) / 1000).toFixed(2)}s\n`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get total AI processing costs for reporting
|
||||
*/
|
||||
export async function getAIProcessingCosts(): Promise<{
|
||||
totalCostEur: number;
|
||||
totalTokens: number;
|
||||
requestCount: number;
|
||||
successfulRequests: number;
|
||||
failedRequests: number;
|
||||
}> {
|
||||
const result = await prisma.aIProcessingRequest.aggregate({
|
||||
_sum: {
|
||||
totalCostEur: true,
|
||||
totalTokens: true,
|
||||
},
|
||||
_count: {
|
||||
id: true,
|
||||
},
|
||||
});
|
||||
|
||||
const successfulRequests = await prisma.aIProcessingRequest.count({
|
||||
where: { success: true }
|
||||
});
|
||||
|
||||
const failedRequests = await prisma.aIProcessingRequest.count({
|
||||
where: { success: false }
|
||||
});
|
||||
|
||||
return {
|
||||
totalCostEur: result._sum.totalCostEur || 0,
|
||||
totalTokens: result._sum.totalTokens || 0,
|
||||
requestCount: result._count.id || 0,
|
||||
successfulRequests,
|
||||
failedRequests,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Start the processing scheduler with configurable settings
|
||||
*/
|
||||
export function startProcessingScheduler(): void {
|
||||
const config = getSchedulerConfig();
|
||||
|
||||
if (!config.enabled) {
|
||||
console.log('[Processing Scheduler] Disabled via configuration');
|
||||
return;
|
||||
}
|
||||
|
||||
console.log(`[Processing Scheduler] Starting with interval: ${config.sessionProcessing.interval}`);
|
||||
console.log(`[Processing Scheduler] Batch size: ${config.sessionProcessing.batchSize === 0 ? 'unlimited' : config.sessionProcessing.batchSize}`);
|
||||
console.log(`[Processing Scheduler] Concurrency: ${config.sessionProcessing.concurrency}`);
|
||||
|
||||
cron.schedule(config.sessionProcessing.interval, async () => {
|
||||
try {
|
||||
await processUnprocessedSessions(
|
||||
config.sessionProcessing.batchSize === 0 ? null : config.sessionProcessing.batchSize,
|
||||
config.sessionProcessing.concurrency
|
||||
);
|
||||
} catch (error) {
|
||||
process.stderr.write(`[ProcessingScheduler] Error in scheduler: ${error}\n`);
|
||||
}
|
||||
});
|
||||
}
|
||||
@ -1,295 +0,0 @@
|
||||
import { PrismaClient, ProcessingStage, ProcessingStatus } from '@prisma/client';
|
||||
|
||||
const prisma = new PrismaClient();
|
||||
|
||||
/**
|
||||
* Centralized processing status management
|
||||
*/
|
||||
export class ProcessingStatusManager {
|
||||
|
||||
/**
|
||||
* Initialize processing status for a session with all stages set to PENDING
|
||||
*/
|
||||
static async initializeSession(sessionId: string): Promise<void> {
|
||||
const stages = [
|
||||
ProcessingStage.CSV_IMPORT,
|
||||
ProcessingStage.TRANSCRIPT_FETCH,
|
||||
ProcessingStage.SESSION_CREATION,
|
||||
ProcessingStage.AI_ANALYSIS,
|
||||
ProcessingStage.QUESTION_EXTRACTION,
|
||||
];
|
||||
|
||||
// Create all processing status records for this session
|
||||
await prisma.sessionProcessingStatus.createMany({
|
||||
data: stages.map(stage => ({
|
||||
sessionId,
|
||||
stage,
|
||||
status: ProcessingStatus.PENDING,
|
||||
})),
|
||||
skipDuplicates: true, // In case some already exist
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Start a processing stage
|
||||
*/
|
||||
static async startStage(
|
||||
sessionId: string,
|
||||
stage: ProcessingStage,
|
||||
metadata?: any
|
||||
): Promise<void> {
|
||||
await prisma.sessionProcessingStatus.upsert({
|
||||
where: {
|
||||
sessionId_stage: { sessionId, stage }
|
||||
},
|
||||
update: {
|
||||
status: ProcessingStatus.IN_PROGRESS,
|
||||
startedAt: new Date(),
|
||||
errorMessage: null,
|
||||
metadata: metadata || null,
|
||||
},
|
||||
create: {
|
||||
sessionId,
|
||||
stage,
|
||||
status: ProcessingStatus.IN_PROGRESS,
|
||||
startedAt: new Date(),
|
||||
metadata: metadata || null,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Complete a processing stage successfully
|
||||
*/
|
||||
static async completeStage(
|
||||
sessionId: string,
|
||||
stage: ProcessingStage,
|
||||
metadata?: any
|
||||
): Promise<void> {
|
||||
await prisma.sessionProcessingStatus.upsert({
|
||||
where: {
|
||||
sessionId_stage: { sessionId, stage }
|
||||
},
|
||||
update: {
|
||||
status: ProcessingStatus.COMPLETED,
|
||||
completedAt: new Date(),
|
||||
errorMessage: null,
|
||||
metadata: metadata || null,
|
||||
},
|
||||
create: {
|
||||
sessionId,
|
||||
stage,
|
||||
status: ProcessingStatus.COMPLETED,
|
||||
startedAt: new Date(),
|
||||
completedAt: new Date(),
|
||||
metadata: metadata || null,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Mark a processing stage as failed
|
||||
*/
|
||||
static async failStage(
|
||||
sessionId: string,
|
||||
stage: ProcessingStage,
|
||||
errorMessage: string,
|
||||
metadata?: any
|
||||
): Promise<void> {
|
||||
await prisma.sessionProcessingStatus.upsert({
|
||||
where: {
|
||||
sessionId_stage: { sessionId, stage }
|
||||
},
|
||||
update: {
|
||||
status: ProcessingStatus.FAILED,
|
||||
completedAt: new Date(),
|
||||
errorMessage,
|
||||
retryCount: { increment: 1 },
|
||||
metadata: metadata || null,
|
||||
},
|
||||
create: {
|
||||
sessionId,
|
||||
stage,
|
||||
status: ProcessingStatus.FAILED,
|
||||
startedAt: new Date(),
|
||||
completedAt: new Date(),
|
||||
errorMessage,
|
||||
retryCount: 1,
|
||||
metadata: metadata || null,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Skip a processing stage (e.g., no transcript URL available)
|
||||
*/
|
||||
static async skipStage(
|
||||
sessionId: string,
|
||||
stage: ProcessingStage,
|
||||
reason: string
|
||||
): Promise<void> {
|
||||
await prisma.sessionProcessingStatus.upsert({
|
||||
where: {
|
||||
sessionId_stage: { sessionId, stage }
|
||||
},
|
||||
update: {
|
||||
status: ProcessingStatus.SKIPPED,
|
||||
completedAt: new Date(),
|
||||
errorMessage: reason,
|
||||
},
|
||||
create: {
|
||||
sessionId,
|
||||
stage,
|
||||
status: ProcessingStatus.SKIPPED,
|
||||
startedAt: new Date(),
|
||||
completedAt: new Date(),
|
||||
errorMessage: reason,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Get processing status for a specific session
|
||||
*/
|
||||
static async getSessionStatus(sessionId: string) {
|
||||
return await prisma.sessionProcessingStatus.findMany({
|
||||
where: { sessionId },
|
||||
orderBy: { stage: 'asc' },
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Get sessions that need processing for a specific stage
|
||||
*/
|
||||
static async getSessionsNeedingProcessing(
|
||||
stage: ProcessingStage,
|
||||
limit: number = 50
|
||||
) {
|
||||
return await prisma.sessionProcessingStatus.findMany({
|
||||
where: {
|
||||
stage,
|
||||
status: ProcessingStatus.PENDING,
|
||||
},
|
||||
include: {
|
||||
session: {
|
||||
include: {
|
||||
import: true,
|
||||
company: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
take: limit,
|
||||
orderBy: { session: { createdAt: 'asc' } },
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Get pipeline status overview
|
||||
*/
|
||||
static async getPipelineStatus() {
|
||||
// Get counts by stage and status
|
||||
const statusCounts = await prisma.sessionProcessingStatus.groupBy({
|
||||
by: ['stage', 'status'],
|
||||
_count: { id: true },
|
||||
});
|
||||
|
||||
// Get total sessions
|
||||
const totalSessions = await prisma.session.count();
|
||||
|
||||
// Organize the data
|
||||
const pipeline: Record<string, Record<string, number>> = {};
|
||||
|
||||
for (const { stage, status, _count } of statusCounts) {
|
||||
if (!pipeline[stage]) {
|
||||
pipeline[stage] = {};
|
||||
}
|
||||
pipeline[stage][status] = _count.id;
|
||||
}
|
||||
|
||||
return {
|
||||
totalSessions,
|
||||
pipeline,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Get sessions with failed processing
|
||||
*/
|
||||
static async getFailedSessions(stage?: ProcessingStage) {
|
||||
const where: any = {
|
||||
status: ProcessingStatus.FAILED,
|
||||
};
|
||||
|
||||
if (stage) {
|
||||
where.stage = stage;
|
||||
}
|
||||
|
||||
return await prisma.sessionProcessingStatus.findMany({
|
||||
where,
|
||||
include: {
|
||||
session: {
|
||||
include: {
|
||||
import: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
orderBy: { completedAt: 'desc' },
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Reset a failed stage for retry
|
||||
*/
|
||||
static async resetStageForRetry(sessionId: string, stage: ProcessingStage): Promise<void> {
|
||||
await prisma.sessionProcessingStatus.update({
|
||||
where: {
|
||||
sessionId_stage: { sessionId, stage }
|
||||
},
|
||||
data: {
|
||||
status: ProcessingStatus.PENDING,
|
||||
startedAt: null,
|
||||
completedAt: null,
|
||||
errorMessage: null,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a session has completed a specific stage
|
||||
*/
|
||||
static async hasCompletedStage(sessionId: string, stage: ProcessingStage): Promise<boolean> {
|
||||
const status = await prisma.sessionProcessingStatus.findUnique({
|
||||
where: {
|
||||
sessionId_stage: { sessionId, stage }
|
||||
},
|
||||
});
|
||||
|
||||
return status?.status === ProcessingStatus.COMPLETED;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a session is ready for a specific stage (previous stages completed)
|
||||
*/
|
||||
static async isReadyForStage(sessionId: string, stage: ProcessingStage): Promise<boolean> {
|
||||
const stageOrder = [
|
||||
ProcessingStage.CSV_IMPORT,
|
||||
ProcessingStage.TRANSCRIPT_FETCH,
|
||||
ProcessingStage.SESSION_CREATION,
|
||||
ProcessingStage.AI_ANALYSIS,
|
||||
ProcessingStage.QUESTION_EXTRACTION,
|
||||
];
|
||||
|
||||
const currentStageIndex = stageOrder.indexOf(stage);
|
||||
if (currentStageIndex === 0) return true; // First stage is always ready
|
||||
|
||||
// Check if all previous stages are completed
|
||||
const previousStages = stageOrder.slice(0, currentStageIndex);
|
||||
|
||||
for (const prevStage of previousStages) {
|
||||
const isCompleted = await this.hasCompletedStage(sessionId, prevStage);
|
||||
if (!isCompleted) return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
}
|
||||
158
lib/scheduler.ts
158
lib/scheduler.ts
@ -1,94 +1,114 @@
|
||||
// CSV import scheduler with configurable intervals
|
||||
// node-cron job to auto-refresh session data every 15 mins
|
||||
import cron from "node-cron";
|
||||
import { prisma } from "./prisma";
|
||||
import { fetchAndParseCsv } from "./csvFetcher";
|
||||
import { getSchedulerConfig } from "./schedulerConfig";
|
||||
|
||||
export function startCsvImportScheduler() {
|
||||
const config = getSchedulerConfig();
|
||||
interface SessionCreateData {
|
||||
id: string;
|
||||
startTime: Date;
|
||||
companyId: string;
|
||||
[key: string]: unknown;
|
||||
}
|
||||
|
||||
if (!config.enabled) {
|
||||
console.log('[CSV Import Scheduler] Disabled via configuration');
|
||||
return;
|
||||
/**
|
||||
* Fetches transcript content from a URL with optional authentication
|
||||
* @param url The URL to fetch the transcript from
|
||||
* @param username Optional username for Basic Auth
|
||||
* @param password Optional password for Basic Auth
|
||||
* @returns The transcript content or null if fetching fails
|
||||
*/
|
||||
async function fetchTranscriptContent(
|
||||
url: string,
|
||||
username?: string,
|
||||
password?: string
|
||||
): Promise<string | null> {
|
||||
try {
|
||||
const authHeader =
|
||||
username && password
|
||||
? "Basic " + Buffer.from(`${username}:${password}`).toString("base64")
|
||||
: undefined;
|
||||
|
||||
const response = await fetch(url, {
|
||||
headers: authHeader ? { Authorization: authHeader } : {},
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
process.stderr.write(
|
||||
`Error fetching transcript: ${response.statusText}\n`
|
||||
);
|
||||
return null;
|
||||
}
|
||||
return await response.text();
|
||||
} catch (error) {
|
||||
process.stderr.write(`Failed to fetch transcript: ${error}\n`);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
console.log(`[CSV Import Scheduler] Starting with interval: ${config.csvImport.interval}`);
|
||||
|
||||
cron.schedule(config.csvImport.interval, async () => {
|
||||
export function startScheduler() {
|
||||
cron.schedule("*/15 * * * *", async () => {
|
||||
const companies = await prisma.company.findMany();
|
||||
for (const company of companies) {
|
||||
try {
|
||||
const rawSessionData = await fetchAndParseCsv(
|
||||
const sessions = await fetchAndParseCsv(
|
||||
company.csvUrl,
|
||||
company.csvUsername as string | undefined,
|
||||
company.csvPassword as string | undefined
|
||||
);
|
||||
await prisma.session.deleteMany({ where: { companyId: company.id } });
|
||||
|
||||
// Create SessionImport records for new data
|
||||
for (const rawSession of rawSessionData) {
|
||||
try {
|
||||
// Use upsert to handle duplicates gracefully
|
||||
await prisma.sessionImport.upsert({
|
||||
where: {
|
||||
companyId_externalSessionId: {
|
||||
companyId: company.id,
|
||||
externalSessionId: rawSession.externalSessionId,
|
||||
},
|
||||
},
|
||||
update: {
|
||||
// Update existing record with latest data
|
||||
startTimeRaw: rawSession.startTimeRaw,
|
||||
endTimeRaw: rawSession.endTimeRaw,
|
||||
ipAddress: rawSession.ipAddress,
|
||||
countryCode: rawSession.countryCode,
|
||||
language: rawSession.language,
|
||||
messagesSent: rawSession.messagesSent,
|
||||
sentimentRaw: rawSession.sentimentRaw,
|
||||
escalatedRaw: rawSession.escalatedRaw,
|
||||
forwardedHrRaw: rawSession.forwardedHrRaw,
|
||||
fullTranscriptUrl: rawSession.fullTranscriptUrl,
|
||||
avgResponseTimeSeconds: rawSession.avgResponseTimeSeconds,
|
||||
tokens: rawSession.tokens,
|
||||
tokensEur: rawSession.tokensEur,
|
||||
category: rawSession.category,
|
||||
initialMessage: rawSession.initialMessage,
|
||||
// Status tracking now handled by ProcessingStatusManager
|
||||
},
|
||||
create: {
|
||||
companyId: company.id,
|
||||
externalSessionId: rawSession.externalSessionId,
|
||||
startTimeRaw: rawSession.startTimeRaw,
|
||||
endTimeRaw: rawSession.endTimeRaw,
|
||||
ipAddress: rawSession.ipAddress,
|
||||
countryCode: rawSession.countryCode,
|
||||
language: rawSession.language,
|
||||
messagesSent: rawSession.messagesSent,
|
||||
sentimentRaw: rawSession.sentimentRaw,
|
||||
escalatedRaw: rawSession.escalatedRaw,
|
||||
forwardedHrRaw: rawSession.forwardedHrRaw,
|
||||
fullTranscriptUrl: rawSession.fullTranscriptUrl,
|
||||
avgResponseTimeSeconds: rawSession.avgResponseTimeSeconds,
|
||||
tokens: rawSession.tokens,
|
||||
tokensEur: rawSession.tokensEur,
|
||||
category: rawSession.category,
|
||||
initialMessage: rawSession.initialMessage,
|
||||
// Status tracking now handled by ProcessingStatusManager
|
||||
},
|
||||
});
|
||||
} catch (error) {
|
||||
// Log individual session import errors but continue processing
|
||||
process.stderr.write(
|
||||
`[Scheduler] Failed to import session ${rawSession.externalSessionId} for company ${company.name}: ${error}\n`
|
||||
for (const session of sessions) {
|
||||
// Fetch transcript content if URL is available
|
||||
let transcriptContent: string | null = null;
|
||||
if (session.fullTranscriptUrl) {
|
||||
transcriptContent = await fetchTranscriptContent(
|
||||
session.fullTranscriptUrl,
|
||||
company.csvUsername as string | undefined,
|
||||
company.csvPassword as string | undefined
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
const sessionData: SessionCreateData = {
|
||||
...session,
|
||||
companyId: company.id,
|
||||
id: session.id || session.sessionId || `sess_${Date.now()}`,
|
||||
// Ensure startTime is not undefined
|
||||
startTime: session.startTime || new Date(),
|
||||
};
|
||||
|
||||
// Only include fields that are properly typed for Prisma
|
||||
await prisma.session.create({
|
||||
data: {
|
||||
id: sessionData.id,
|
||||
companyId: sessionData.companyId,
|
||||
startTime: sessionData.startTime,
|
||||
// endTime is required in the schema, so use startTime if not available
|
||||
endTime: session.endTime || new Date(),
|
||||
ipAddress: session.ipAddress || null,
|
||||
country: session.country || null,
|
||||
language: session.language || null,
|
||||
sentiment:
|
||||
typeof session.sentiment === "number"
|
||||
? session.sentiment
|
||||
: null,
|
||||
messagesSent:
|
||||
typeof session.messagesSent === "number"
|
||||
? session.messagesSent
|
||||
: 0,
|
||||
category: session.category || null,
|
||||
fullTranscriptUrl: session.fullTranscriptUrl || null,
|
||||
transcriptContent: transcriptContent, // Add the transcript content
|
||||
},
|
||||
});
|
||||
}
|
||||
// Using process.stdout.write instead of console.log to avoid ESLint warning
|
||||
process.stdout.write(
|
||||
`[Scheduler] Imported ${rawSessionData.length} session records for company: ${company.name}\n`
|
||||
`[Scheduler] Refreshed sessions for company: ${company.name}\n`
|
||||
);
|
||||
} catch (e) {
|
||||
// Using process.stderr.write instead of console.error to avoid ESLint warning
|
||||
process.stderr.write(
|
||||
`[Scheduler] Failed to fetch CSV for company: ${company.name} - ${e}\n`
|
||||
`[Scheduler] Failed for company: ${company.name} - ${e}\n`
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@ -1,44 +0,0 @@
|
||||
// Legacy scheduler configuration - now uses centralized env management
|
||||
// This file is kept for backward compatibility but delegates to lib/env.ts
|
||||
|
||||
import { getSchedulerConfig as getEnvSchedulerConfig, logEnvConfig } from "./env";
|
||||
|
||||
// Legacy scheduler configuration shape; values are sourced from lib/env.ts.
export interface SchedulerConfig {
  enabled: boolean; // master on/off switch for all schedulers
  csvImport: {
    interval: string; // cron expression for CSV import runs
  };
  sessionProcessing: {
    interval: string; // cron expression for AI session-processing runs
    batchSize: number; // 0 = unlimited
    concurrency: number; // parallelism for session processing — semantics defined in lib/env.ts
  };
}
|
||||
|
||||
/**
|
||||
* Get scheduler configuration from environment variables
|
||||
* @deprecated Use getSchedulerConfig from lib/env.ts instead
|
||||
*/
|
||||
export function getSchedulerConfig(): SchedulerConfig {
|
||||
const config = getEnvSchedulerConfig();
|
||||
|
||||
return {
|
||||
enabled: config.enabled,
|
||||
csvImport: {
|
||||
interval: config.csvImport.interval,
|
||||
},
|
||||
sessionProcessing: {
|
||||
interval: config.sessionProcessing.interval,
|
||||
batchSize: config.sessionProcessing.batchSize,
|
||||
concurrency: config.sessionProcessing.concurrency,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
/**
 * Log scheduler configuration.
 *
 * The `config` parameter is accepted only for backward compatibility with the
 * legacy API and is intentionally unused; the output comes from lib/env.ts.
 *
 * @deprecated Use logEnvConfig from lib/env.ts instead
 */
export function logSchedulerConfig(config: SchedulerConfig): void {
  // Delegates entirely to the centralized env logger.
  logEnvConfig();
}
|
||||
@ -1,18 +0,0 @@
|
||||
// Combined scheduler initialization
|
||||
import { startCsvImportScheduler } from "./scheduler";
|
||||
import { startProcessingScheduler } from "./processingScheduler";
|
||||
|
||||
/**
|
||||
* Initialize all schedulers
|
||||
* - CSV import scheduler (runs every 15 minutes)
|
||||
* - Session processing scheduler (runs every hour)
|
||||
*/
|
||||
export function initializeSchedulers() {
|
||||
// Start the CSV import scheduler
|
||||
startCsvImportScheduler();
|
||||
|
||||
// Start the session processing scheduler
|
||||
startProcessingScheduler();
|
||||
|
||||
console.log("All schedulers initialized successfully");
|
||||
}
|
||||
@ -1,151 +0,0 @@
|
||||
// Transcript fetching utility
|
||||
import fetch from "node-fetch";
|
||||
|
||||
// Outcome of a transcript fetch: `content` on success, `error` on failure.
export interface TranscriptFetchResult {
  success: boolean;
  content?: string; // trimmed transcript text; present when success is true
  error?: string; // human-readable failure reason; present when success is false
}
|
||||
|
||||
/**
|
||||
* Fetch transcript content from a URL
|
||||
* @param url The transcript URL
|
||||
* @param username Optional username for authentication
|
||||
* @param password Optional password for authentication
|
||||
* @returns Promise with fetch result
|
||||
*/
|
||||
export async function fetchTranscriptContent(
|
||||
url: string,
|
||||
username?: string,
|
||||
password?: string
|
||||
): Promise<TranscriptFetchResult> {
|
||||
try {
|
||||
if (!url || !url.trim()) {
|
||||
return {
|
||||
success: false,
|
||||
error: 'No transcript URL provided',
|
||||
};
|
||||
}
|
||||
|
||||
// Prepare authentication header if credentials provided
|
||||
const authHeader =
|
||||
username && password
|
||||
? "Basic " + Buffer.from(`${username}:${password}`).toString("base64")
|
||||
: undefined;
|
||||
|
||||
const headers: Record<string, string> = {
|
||||
'User-Agent': 'LiveDash-Transcript-Fetcher/1.0',
|
||||
};
|
||||
|
||||
if (authHeader) {
|
||||
headers.Authorization = authHeader;
|
||||
}
|
||||
|
||||
// Fetch the transcript with timeout
|
||||
const controller = new AbortController();
|
||||
const timeoutId = setTimeout(() => controller.abort(), 30000); // 30 second timeout
|
||||
|
||||
const response = await fetch(url, {
|
||||
method: 'GET',
|
||||
headers,
|
||||
signal: controller.signal,
|
||||
});
|
||||
|
||||
clearTimeout(timeoutId);
|
||||
|
||||
if (!response.ok) {
|
||||
return {
|
||||
success: false,
|
||||
error: `HTTP ${response.status}: ${response.statusText}`,
|
||||
};
|
||||
}
|
||||
|
||||
const content = await response.text();
|
||||
|
||||
if (!content || content.trim().length === 0) {
|
||||
return {
|
||||
success: false,
|
||||
error: 'Empty transcript content',
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
content: content.trim(),
|
||||
};
|
||||
|
||||
} catch (error) {
|
||||
const errorMessage = error instanceof Error ? error.message : String(error);
|
||||
|
||||
// Handle common network errors
|
||||
if (errorMessage.includes('ENOTFOUND')) {
|
||||
return {
|
||||
success: false,
|
||||
error: 'Domain not found',
|
||||
};
|
||||
}
|
||||
|
||||
if (errorMessage.includes('ECONNREFUSED')) {
|
||||
return {
|
||||
success: false,
|
||||
error: 'Connection refused',
|
||||
};
|
||||
}
|
||||
|
||||
if (errorMessage.includes('timeout')) {
|
||||
return {
|
||||
success: false,
|
||||
error: 'Request timeout',
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
success: false,
|
||||
error: errorMessage,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate if a URL looks like a valid transcript URL
|
||||
* @param url The URL to validate
|
||||
* @returns boolean indicating if URL appears valid
|
||||
*/
|
||||
export function isValidTranscriptUrl(url: string): boolean {
|
||||
if (!url || typeof url !== 'string') {
|
||||
return false;
|
||||
}
|
||||
|
||||
try {
|
||||
const parsedUrl = new URL(url);
|
||||
return parsedUrl.protocol === 'http:' || parsedUrl.protocol === 'https:';
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract session ID from transcript content if possible
|
||||
* This is a helper function that can be enhanced based on transcript format
|
||||
* @param content The transcript content
|
||||
* @returns Extracted session ID or null
|
||||
*/
|
||||
export function extractSessionIdFromTranscript(content: string): string | null {
|
||||
if (!content) return null;
|
||||
|
||||
// Look for common session ID patterns
|
||||
const patterns = [
|
||||
/session[_-]?id[:\s]*([a-zA-Z0-9-]+)/i,
|
||||
/id[:\s]*([a-zA-Z0-9-]{8,})/i,
|
||||
/^([a-zA-Z0-9-]{8,})/m, // First line might be session ID
|
||||
];
|
||||
|
||||
for (const pattern of patterns) {
|
||||
const match = content.match(pattern);
|
||||
if (match && match[1]) {
|
||||
return match[1].trim();
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
@ -1,360 +0,0 @@
|
||||
// Transcript parsing utility for converting raw transcript content into structured messages
|
||||
import { prisma } from './prisma.js';
|
||||
|
||||
// A single chat message parsed out of a raw transcript.
export interface ParsedMessage {
  sessionId: string; // owning session; filled in by the caller before persisting
  timestamp: Date; // embedded transcript timestamp, or distributed across the session
  role: string; // "User", "Assistant", "System", ...
  content: string; // message text; may span multiple lines
  order: number; // 0-based position within the conversation
}
|
||||
|
||||
// Result of parsing a transcript: `messages` on success, `error` on failure.
export interface TranscriptParseResult {
  success: boolean;
  messages?: ParsedMessage[]; // present when success is true
  error?: string; // present when success is false
}
|
||||
|
||||
/**
|
||||
* Parse European date format (DD.MM.YYYY HH:mm:ss) to Date object
|
||||
*/
|
||||
function parseEuropeanDate(dateStr: string): Date {
|
||||
const match = dateStr.match(/(\d{2})\.(\d{2})\.(\d{4}) (\d{2}):(\d{2}):(\d{2})/);
|
||||
if (!match) {
|
||||
throw new Error(`Invalid date format: ${dateStr}`);
|
||||
}
|
||||
|
||||
const [, day, month, year, hour, minute, second] = match;
|
||||
return new Date(
|
||||
parseInt(year, 10),
|
||||
parseInt(month, 10) - 1, // JavaScript months are 0-indexed
|
||||
parseInt(day, 10),
|
||||
parseInt(hour, 10),
|
||||
parseInt(minute, 10),
|
||||
parseInt(second, 10)
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse raw transcript content into structured messages
|
||||
* @param content Raw transcript content
|
||||
* @param startTime Session start time
|
||||
* @param endTime Session end time
|
||||
* @returns Parsed messages with timestamps
|
||||
*/
|
||||
export function parseTranscriptToMessages(
|
||||
content: string,
|
||||
startTime: Date,
|
||||
endTime: Date
|
||||
): TranscriptParseResult {
|
||||
try {
|
||||
if (!content || !content.trim()) {
|
||||
return {
|
||||
success: false,
|
||||
error: 'Empty transcript content'
|
||||
};
|
||||
}
|
||||
|
||||
const messages: ParsedMessage[] = [];
|
||||
const lines = content.split('\n');
|
||||
let currentMessage: { role: string; content: string; timestamp?: string } | null = null;
|
||||
let order = 0;
|
||||
|
||||
for (const line of lines) {
|
||||
const trimmedLine = line.trim();
|
||||
|
||||
// Skip empty lines
|
||||
if (!trimmedLine) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Check if line starts with a timestamp and role [DD.MM.YYYY HH:MM:SS] Role: content
|
||||
const timestampRoleMatch = trimmedLine.match(/^\[(\d{2}\.\d{2}\.\d{4} \d{2}:\d{2}:\d{2})\]\s+(User|Assistant|System|user|assistant|system):\s*(.*)$/i);
|
||||
|
||||
// Check if line starts with just a role (User:, Assistant:, System:, etc.)
|
||||
const roleMatch = trimmedLine.match(/^(User|Assistant|System|user|assistant|system):\s*(.*)$/i);
|
||||
|
||||
if (timestampRoleMatch) {
|
||||
// Save previous message if exists
|
||||
if (currentMessage) {
|
||||
messages.push({
|
||||
sessionId: '', // Will be set by caller
|
||||
timestamp: new Date(), // Will be calculated below
|
||||
role: currentMessage.role,
|
||||
content: currentMessage.content.trim(),
|
||||
order: order++
|
||||
});
|
||||
}
|
||||
|
||||
// Start new message with timestamp
|
||||
const timestamp = timestampRoleMatch[1];
|
||||
const role = timestampRoleMatch[2].charAt(0).toUpperCase() + timestampRoleMatch[2].slice(1).toLowerCase();
|
||||
const content = timestampRoleMatch[3] || '';
|
||||
|
||||
currentMessage = {
|
||||
role,
|
||||
content,
|
||||
timestamp // Store the timestamp for later parsing
|
||||
};
|
||||
} else if (roleMatch) {
|
||||
// Save previous message if exists
|
||||
if (currentMessage) {
|
||||
messages.push({
|
||||
sessionId: '', // Will be set by caller
|
||||
timestamp: new Date(), // Will be calculated below
|
||||
role: currentMessage.role,
|
||||
content: currentMessage.content.trim(),
|
||||
order: order++
|
||||
});
|
||||
}
|
||||
|
||||
// Start new message without timestamp
|
||||
const role = roleMatch[1].charAt(0).toUpperCase() + roleMatch[1].slice(1).toLowerCase();
|
||||
const content = roleMatch[2] || '';
|
||||
|
||||
currentMessage = {
|
||||
role,
|
||||
content
|
||||
};
|
||||
} else if (currentMessage) {
|
||||
// Continue previous message (multi-line)
|
||||
currentMessage.content += '\n' + trimmedLine;
|
||||
}
|
||||
// If no current message and no role match, skip the line (orphaned content)
|
||||
}
|
||||
|
||||
// Save the last message
|
||||
if (currentMessage) {
|
||||
messages.push({
|
||||
sessionId: '', // Will be set by caller
|
||||
timestamp: new Date(), // Will be calculated below
|
||||
role: currentMessage.role,
|
||||
content: currentMessage.content.trim(),
|
||||
order: order++
|
||||
});
|
||||
}
|
||||
|
||||
if (messages.length === 0) {
|
||||
return {
|
||||
success: false,
|
||||
error: 'No messages found in transcript'
|
||||
};
|
||||
}
|
||||
|
||||
// Calculate timestamps - use parsed timestamps if available, otherwise distribute across session duration
|
||||
const hasTimestamps = messages.some(msg => (msg as any).timestamp);
|
||||
|
||||
if (hasTimestamps) {
|
||||
// Use parsed timestamps from the transcript
|
||||
messages.forEach((message, index) => {
|
||||
const msgWithTimestamp = message as any;
|
||||
if (msgWithTimestamp.timestamp) {
|
||||
try {
|
||||
message.timestamp = parseEuropeanDate(msgWithTimestamp.timestamp);
|
||||
} catch (error) {
|
||||
// Fallback to distributed timestamp if parsing fails
|
||||
const sessionDurationMs = endTime.getTime() - startTime.getTime();
|
||||
const messageInterval = messages.length > 1 ? sessionDurationMs / (messages.length - 1) : 0;
|
||||
message.timestamp = new Date(startTime.getTime() + (index * messageInterval));
|
||||
}
|
||||
} else {
|
||||
// Fallback to distributed timestamp
|
||||
const sessionDurationMs = endTime.getTime() - startTime.getTime();
|
||||
const messageInterval = messages.length > 1 ? sessionDurationMs / (messages.length - 1) : 0;
|
||||
message.timestamp = new Date(startTime.getTime() + (index * messageInterval));
|
||||
}
|
||||
});
|
||||
} else {
|
||||
// Distribute messages across session duration
|
||||
const sessionDurationMs = endTime.getTime() - startTime.getTime();
|
||||
const messageInterval = messages.length > 1 ? sessionDurationMs / (messages.length - 1) : 0;
|
||||
|
||||
messages.forEach((message, index) => {
|
||||
message.timestamp = new Date(startTime.getTime() + (index * messageInterval));
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
messages
|
||||
};
|
||||
|
||||
} catch (error) {
|
||||
return {
|
||||
success: false,
|
||||
error: error instanceof Error ? error.message : String(error)
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Store parsed messages in the database for a session
|
||||
* @param sessionId The session ID
|
||||
* @param messages Array of parsed messages
|
||||
*/
|
||||
export async function storeMessagesForSession(
|
||||
sessionId: string,
|
||||
messages: ParsedMessage[]
|
||||
): Promise<void> {
|
||||
// Delete existing messages for this session (in case of re-processing)
|
||||
await prisma.message.deleteMany({
|
||||
where: { sessionId }
|
||||
});
|
||||
|
||||
// Create new messages
|
||||
const messagesWithSessionId = messages.map(msg => ({
|
||||
...msg,
|
||||
sessionId
|
||||
}));
|
||||
|
||||
await prisma.message.createMany({
|
||||
data: messagesWithSessionId
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Process transcript for a single session
|
||||
* @param sessionId The session ID to process
|
||||
*/
|
||||
export async function processSessionTranscript(sessionId: string): Promise<void> {
|
||||
// Get the session and its import data
|
||||
const session = await prisma.session.findUnique({
|
||||
where: { id: sessionId },
|
||||
include: {
|
||||
import: true
|
||||
}
|
||||
});
|
||||
|
||||
if (!session) {
|
||||
throw new Error(`Session not found: ${sessionId}`);
|
||||
}
|
||||
|
||||
if (!session.import) {
|
||||
throw new Error(`No import data found for session: ${sessionId}`);
|
||||
}
|
||||
|
||||
if (!session.import.rawTranscriptContent) {
|
||||
throw new Error(`No transcript content found for session: ${sessionId}`);
|
||||
}
|
||||
|
||||
// Parse the start and end times
|
||||
const startTime = parseEuropeanDate(session.import.startTimeRaw);
|
||||
const endTime = parseEuropeanDate(session.import.endTimeRaw);
|
||||
|
||||
// Parse the transcript
|
||||
const parseResult = parseTranscriptToMessages(
|
||||
session.import.rawTranscriptContent,
|
||||
startTime,
|
||||
endTime
|
||||
);
|
||||
|
||||
if (!parseResult.success) {
|
||||
throw new Error(`Failed to parse transcript: ${parseResult.error}`);
|
||||
}
|
||||
|
||||
// Store the messages
|
||||
await storeMessagesForSession(sessionId, parseResult.messages!);
|
||||
|
||||
console.log(`✅ Processed ${parseResult.messages!.length} messages for session ${sessionId}`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Process all sessions that have transcript content but no messages
|
||||
*/
|
||||
export async function processAllUnparsedTranscripts(): Promise<void> {
|
||||
console.log('🔍 Finding sessions with unparsed transcripts...');
|
||||
|
||||
// Find sessions that have transcript content but no messages
|
||||
const sessionsToProcess = await prisma.session.findMany({
|
||||
where: {
|
||||
import: {
|
||||
rawTranscriptContent: {
|
||||
not: null
|
||||
}
|
||||
},
|
||||
messages: {
|
||||
none: {}
|
||||
}
|
||||
},
|
||||
include: {
|
||||
import: true,
|
||||
_count: {
|
||||
select: {
|
||||
messages: true
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
console.log(`📋 Found ${sessionsToProcess.length} sessions to process`);
|
||||
|
||||
let processed = 0;
|
||||
let errors = 0;
|
||||
|
||||
for (const session of sessionsToProcess) {
|
||||
try {
|
||||
await processSessionTranscript(session.id);
|
||||
processed++;
|
||||
} catch (error) {
|
||||
console.error(`❌ Error processing session ${session.id}:`, error);
|
||||
errors++;
|
||||
}
|
||||
}
|
||||
|
||||
console.log(`\n📊 Processing complete:`);
|
||||
console.log(` ✅ Successfully processed: ${processed} sessions`);
|
||||
console.log(` ❌ Errors: ${errors} sessions`);
|
||||
console.log(` 📝 Total messages created: ${await getTotalMessageCount()}`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get total count of messages in the database
|
||||
*/
|
||||
export async function getTotalMessageCount(): Promise<number> {
|
||||
const result = await prisma.message.count();
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get messages for a specific session
|
||||
* @param sessionId The session ID
|
||||
* @returns Array of messages ordered by order field
|
||||
*/
|
||||
export async function getMessagesForSession(sessionId: string) {
|
||||
return await prisma.message.findMany({
|
||||
where: { sessionId },
|
||||
orderBy: { order: 'asc' }
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Get parsing statistics
|
||||
*/
|
||||
export async function getParsingStats() {
|
||||
const totalSessions = await prisma.session.count();
|
||||
const sessionsWithTranscripts = await prisma.session.count({
|
||||
where: {
|
||||
import: {
|
||||
rawTranscriptContent: {
|
||||
not: null
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
const sessionsWithMessages = await prisma.session.count({
|
||||
where: {
|
||||
messages: {
|
||||
some: {}
|
||||
}
|
||||
}
|
||||
});
|
||||
const totalMessages = await getTotalMessageCount();
|
||||
|
||||
return {
|
||||
totalSessions,
|
||||
sessionsWithTranscripts,
|
||||
sessionsWithMessages,
|
||||
unparsedSessions: sessionsWithTranscripts - sessionsWithMessages,
|
||||
totalMessages
|
||||
};
|
||||
}
|
||||
29
lib/types.ts
29
lib/types.ts
@ -35,16 +35,6 @@ export interface User {
|
||||
updatedAt: Date;
|
||||
}
|
||||
|
||||
export interface Message {
|
||||
id: string;
|
||||
sessionId: string;
|
||||
timestamp: Date | null;
|
||||
role: string; // "User", "Assistant", "System", etc.
|
||||
content: string;
|
||||
order: number; // Order within the conversation (0, 1, 2, ...)
|
||||
createdAt: Date;
|
||||
}
|
||||
|
||||
export interface ChatSession {
|
||||
id: string;
|
||||
sessionId: string;
|
||||
@ -54,7 +44,7 @@ export interface ChatSession {
|
||||
language?: string | null;
|
||||
country?: string | null;
|
||||
ipAddress?: string | null;
|
||||
sentiment?: string | null; // Now a SentimentCategory enum: "POSITIVE", "NEUTRAL", "NEGATIVE"
|
||||
sentiment?: number | null;
|
||||
messagesSent?: number;
|
||||
startTime: Date;
|
||||
endTime?: Date | null;
|
||||
@ -65,11 +55,11 @@ export interface ChatSession {
|
||||
avgResponseTime?: number | null;
|
||||
escalated?: boolean;
|
||||
forwardedHr?: boolean;
|
||||
tokens?: number;
|
||||
tokensEur?: number;
|
||||
initialMsg?: string;
|
||||
fullTranscriptUrl?: string | null;
|
||||
summary?: string | null; // Brief summary of the conversation
|
||||
messages?: Message[]; // Parsed messages from transcript
|
||||
transcriptContent?: string | null; // Full transcript content
|
||||
transcriptContent?: string | null;
|
||||
}
|
||||
|
||||
export interface SessionQuery {
|
||||
@ -115,11 +105,6 @@ export interface WordCloudWord {
|
||||
value: number;
|
||||
}
|
||||
|
||||
export interface TopQuestion {
|
||||
question: string;
|
||||
count: number;
|
||||
}
|
||||
|
||||
export interface MetricsResult {
|
||||
totalSessions: number;
|
||||
avgSessionsPerDay: number;
|
||||
@ -154,12 +139,6 @@ export interface MetricsResult {
|
||||
avgSessionTimeTrend?: number; // e.g., percentage change in avgSessionLength
|
||||
avgResponseTimeTrend?: number; // e.g., percentage change in avgResponseTime
|
||||
|
||||
// New metrics for enhanced dashboard
|
||||
avgDailyCosts?: number; // Average daily costs in euros
|
||||
peakUsageTime?: string; // Peak usage time (e.g., "14:00-15:00")
|
||||
resolvedChatsPercentage?: number; // Percentage of resolved chats
|
||||
topQuestions?: TopQuestion[]; // Top 5 most asked questions
|
||||
|
||||
// Debug properties
|
||||
totalSessionDuration?: number;
|
||||
validSessionsForDuration?: number;
|
||||
|
||||
@ -1,6 +0,0 @@
|
||||
import { clsx, type ClassValue } from "clsx"
|
||||
import { twMerge } from "tailwind-merge"
|
||||
|
||||
export function cn(...inputs: ClassValue[]) {
|
||||
return twMerge(clsx(inputs))
|
||||
}
|
||||
@ -1,129 +0,0 @@
|
||||
import { PrismaClient, ProcessingStage, ProcessingStatus } from '@prisma/client';
|
||||
import { ProcessingStatusManager } from './lib/processingStatusManager';
|
||||
|
||||
const prisma = new PrismaClient();
|
||||
|
||||
async function migrateToRefactoredSystem() {
|
||||
try {
|
||||
console.log('=== MIGRATING TO REFACTORED PROCESSING SYSTEM ===\n');
|
||||
|
||||
// Get all existing sessions
|
||||
const sessions = await prisma.session.findMany({
|
||||
include: {
|
||||
import: true,
|
||||
messages: true,
|
||||
sessionQuestions: true,
|
||||
},
|
||||
orderBy: { createdAt: 'asc' }
|
||||
});
|
||||
|
||||
console.log(`Found ${sessions.length} sessions to migrate...\n`);
|
||||
|
||||
let migratedCount = 0;
|
||||
|
||||
for (const session of sessions) {
|
||||
console.log(`Migrating session ${session.import?.externalSessionId || session.id}...`);
|
||||
|
||||
// Initialize processing status for this session
|
||||
await ProcessingStatusManager.initializeSession(session.id);
|
||||
|
||||
// Determine the current state of each stage based on existing data
|
||||
|
||||
// 1. CSV_IMPORT - Always completed if session exists
|
||||
await ProcessingStatusManager.completeStage(session.id, ProcessingStage.CSV_IMPORT, {
|
||||
migratedFrom: 'existing_session',
|
||||
importId: session.importId
|
||||
});
|
||||
|
||||
// 2. TRANSCRIPT_FETCH - Check if transcript content exists
|
||||
if (session.import?.rawTranscriptContent) {
|
||||
await ProcessingStatusManager.completeStage(session.id, ProcessingStage.TRANSCRIPT_FETCH, {
|
||||
migratedFrom: 'existing_transcript',
|
||||
contentLength: session.import.rawTranscriptContent.length
|
||||
});
|
||||
} else if (!session.import?.fullTranscriptUrl) {
|
||||
// No transcript URL - skip this stage
|
||||
await ProcessingStatusManager.skipStage(session.id, ProcessingStage.TRANSCRIPT_FETCH, 'No transcript URL in original import');
|
||||
} else {
|
||||
// Has URL but no content - mark as pending for retry
|
||||
console.log(` - Transcript fetch pending for ${session.import.externalSessionId}`);
|
||||
}
|
||||
|
||||
// 3. SESSION_CREATION - Check if messages exist
|
||||
if (session.messages.length > 0) {
|
||||
await ProcessingStatusManager.completeStage(session.id, ProcessingStage.SESSION_CREATION, {
|
||||
migratedFrom: 'existing_messages',
|
||||
messageCount: session.messages.length
|
||||
});
|
||||
} else if (session.import?.rawTranscriptContent) {
|
||||
// Has transcript but no messages - needs reprocessing
|
||||
console.log(` - Session creation pending for ${session.import.externalSessionId} (has transcript but no messages)`);
|
||||
} else {
|
||||
// No transcript content - skip or mark as pending based on transcript fetch status
|
||||
if (!session.import?.fullTranscriptUrl) {
|
||||
await ProcessingStatusManager.skipStage(session.id, ProcessingStage.SESSION_CREATION, 'No transcript content available');
|
||||
}
|
||||
}
|
||||
|
||||
// 4. AI_ANALYSIS - Check if AI fields are populated
|
||||
const hasAIAnalysis = session.summary || session.sentiment || session.category || session.language;
|
||||
if (hasAIAnalysis) {
|
||||
await ProcessingStatusManager.completeStage(session.id, ProcessingStage.AI_ANALYSIS, {
|
||||
migratedFrom: 'existing_ai_analysis',
|
||||
hasSummary: !!session.summary,
|
||||
hasSentiment: !!session.sentiment,
|
||||
hasCategory: !!session.category,
|
||||
hasLanguage: !!session.language
|
||||
});
|
||||
} else {
|
||||
// No AI analysis - mark as pending if session creation is complete
|
||||
if (session.messages.length > 0) {
|
||||
console.log(` - AI analysis pending for ${session.import?.externalSessionId}`);
|
||||
}
|
||||
}
|
||||
|
||||
// 5. QUESTION_EXTRACTION - Check if questions exist
|
||||
if (session.sessionQuestions.length > 0) {
|
||||
await ProcessingStatusManager.completeStage(session.id, ProcessingStage.QUESTION_EXTRACTION, {
|
||||
migratedFrom: 'existing_questions',
|
||||
questionCount: session.sessionQuestions.length
|
||||
});
|
||||
} else {
|
||||
// No questions - mark as pending if AI analysis is complete
|
||||
if (hasAIAnalysis) {
|
||||
console.log(` - Question extraction pending for ${session.import?.externalSessionId}`);
|
||||
}
|
||||
}
|
||||
|
||||
migratedCount++;
|
||||
|
||||
if (migratedCount % 10 === 0) {
|
||||
console.log(` Migrated ${migratedCount}/${sessions.length} sessions...`);
|
||||
}
|
||||
}
|
||||
|
||||
console.log(`\n✓ Successfully migrated ${migratedCount} sessions to the new processing system`);
|
||||
|
||||
// Show final status
|
||||
console.log('\n=== MIGRATION COMPLETE - FINAL STATUS ===');
|
||||
const pipelineStatus = await ProcessingStatusManager.getPipelineStatus();
|
||||
|
||||
const stages = ['CSV_IMPORT', 'TRANSCRIPT_FETCH', 'SESSION_CREATION', 'AI_ANALYSIS', 'QUESTION_EXTRACTION'];
|
||||
|
||||
for (const stage of stages) {
|
||||
const stageData = pipelineStatus.pipeline[stage] || {};
|
||||
const pending = stageData.PENDING || 0;
|
||||
const completed = stageData.COMPLETED || 0;
|
||||
const skipped = stageData.SKIPPED || 0;
|
||||
|
||||
console.log(`${stage}: ${completed} completed, ${pending} pending, ${skipped} skipped`);
|
||||
}
|
||||
|
||||
} catch (error) {
|
||||
console.error('Error migrating to refactored system:', error);
|
||||
} finally {
|
||||
await prisma.$disconnect();
|
||||
}
|
||||
}
|
||||
|
||||
migrateToRefactoredSystem();
|
||||
54
migrations/0001_initial_schema.sql
Normal file
54
migrations/0001_initial_schema.sql
Normal file
@ -0,0 +1,54 @@
|
||||
-- Initial database schema for LiveDash-Node
|
||||
-- This combines the init migration and transcript_content addition
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "Company" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"name" TEXT NOT NULL,
|
||||
"csvUrl" TEXT NOT NULL,
|
||||
"csvUsername" TEXT,
|
||||
"csvPassword" TEXT,
|
||||
"sentimentAlert" REAL,
|
||||
"dashboardOpts" TEXT,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "User" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"email" TEXT NOT NULL,
|
||||
"password" TEXT NOT NULL,
|
||||
"companyId" TEXT NOT NULL,
|
||||
"role" TEXT NOT NULL,
|
||||
"resetToken" TEXT,
|
||||
"resetTokenExpiry" DATETIME,
|
||||
CONSTRAINT "User_companyId_fkey" FOREIGN KEY ("companyId") REFERENCES "Company" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "Session" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"companyId" TEXT NOT NULL,
|
||||
"startTime" DATETIME NOT NULL,
|
||||
"endTime" DATETIME NOT NULL,
|
||||
"ipAddress" TEXT,
|
||||
"country" TEXT,
|
||||
"language" TEXT,
|
||||
"messagesSent" INTEGER,
|
||||
"sentiment" REAL,
|
||||
"escalated" BOOLEAN,
|
||||
"forwardedHr" BOOLEAN,
|
||||
"fullTranscriptUrl" TEXT,
|
||||
"transcriptContent" TEXT,
|
||||
"avgResponseTime" REAL,
|
||||
"tokens" INTEGER,
|
||||
"tokensEur" REAL,
|
||||
"category" TEXT,
|
||||
"initialMsg" TEXT,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
CONSTRAINT "Session_companyId_fkey" FOREIGN KEY ("companyId") REFERENCES "Company" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "User_email_key" ON "User"("email");
|
||||
@ -5,8 +5,10 @@ const nextConfig = {
|
||||
reactStrictMode: true,
|
||||
// Allow cross-origin requests from specific origins in development
|
||||
allowedDevOrigins: [
|
||||
"192.168.1.2",
|
||||
"localhost",
|
||||
"127.0.0.1"
|
||||
"propc",
|
||||
"test123.kjanat.com",
|
||||
],
|
||||
};
|
||||
|
||||
|
||||
119
package.json
119
package.json
@ -5,32 +5,39 @@
|
||||
"private": true,
|
||||
"scripts": {
|
||||
"build": "next build",
|
||||
"dev": "tsx server.ts",
|
||||
"dev:next-only": "next dev --turbopack",
|
||||
"format": "npx prettier --write .",
|
||||
"format:check": "npx prettier --check .",
|
||||
"format": "pnpm dlx prettier --write .",
|
||||
"format:check": "pnpm dlx prettier --check .",
|
||||
"format:standard": "pnpm dlx standard . --fix",
|
||||
"lint": "next lint",
|
||||
"lint:fix": "npx eslint --fix",
|
||||
"lint:fix": "pnpm dlx eslint --fix",
|
||||
"prisma:generate": "prisma generate",
|
||||
"prisma:migrate": "prisma migrate dev",
|
||||
"prisma:seed": "tsx prisma/seed.ts",
|
||||
"prisma:push": "prisma db push",
|
||||
"prisma:push:force": "prisma db push --force-reset",
|
||||
"prisma:seed": "node prisma/seed.mjs",
|
||||
"prisma:studio": "prisma studio",
|
||||
"start": "node server.mjs",
|
||||
"test": "vitest run",
|
||||
"test:watch": "vitest",
|
||||
"test:coverage": "vitest run --coverage",
|
||||
"start": "next start",
|
||||
"lint:md": "markdownlint-cli2 \"**/*.md\" \"!.trunk/**\" \"!.venv/**\" \"!node_modules/**\"",
|
||||
"lint:md:fix": "markdownlint-cli2 --fix \"**/*.md\" \"!.trunk/**\" \"!.venv/**\" \"!node_modules/**\""
|
||||
"lint:md:fix": "markdownlint-cli2 --fix \"**/*.md\" \"!.trunk/**\" \"!.venv/**\" \"!node_modules/**\"",
|
||||
"cf-typegen": "wrangler types",
|
||||
"check": "tsc && wrangler deploy --dry-run",
|
||||
"deploy": "wrangler deploy",
|
||||
"dev": "next dev",
|
||||
"dev:old": "next dev --turbopack",
|
||||
"dev:cf": "wrangler dev",
|
||||
"predeploy": "wrangler d1 migrations apply DB --remote",
|
||||
"seedLocalD1": "wrangler d1 migrations apply DB --local",
|
||||
"d1:list": "wrangler d1 list",
|
||||
"d1:info": "wrangler d1 info d1-notso-livedash",
|
||||
"d1:info:remote": "wrangler d1 info d1-notso-livedash --remote",
|
||||
"d1:query": "node scripts/d1-query.js",
|
||||
"d1:export": "wrangler d1 export d1-notso-livedash",
|
||||
"d1:export:remote": "wrangler d1 export d1-notso-livedash --remote",
|
||||
"d1:backup": "wrangler d1 export d1-notso-livedash --output backups/$(date +%Y%m%d_%H%M%S)_backup.sql",
|
||||
"d1:schema": "wrangler d1 export d1-notso-livedash --no-data --output schema.sql",
|
||||
"d1": "node scripts/d1.js"
|
||||
},
|
||||
"dependencies": {
|
||||
"@prisma/adapter-pg": "^6.10.1",
|
||||
"@prisma/client": "^6.10.1",
|
||||
"@radix-ui/react-dropdown-menu": "^2.1.15",
|
||||
"@radix-ui/react-separator": "^1.1.7",
|
||||
"@radix-ui/react-slot": "^1.2.3",
|
||||
"@radix-ui/react-tooltip": "^1.2.7",
|
||||
"@prisma/adapter-d1": "^6.8.2",
|
||||
"@prisma/client": "^6.8.2",
|
||||
"@rapideditor/country-coder": "^5.4.0",
|
||||
"@types/d3": "^7.4.3",
|
||||
"@types/d3-cloud": "^1.2.9",
|
||||
@ -39,60 +46,50 @@
|
||||
"@types/leaflet": "^1.9.18",
|
||||
"@types/node-fetch": "^2.6.12",
|
||||
"bcryptjs": "^3.0.2",
|
||||
"class-variance-authority": "^0.7.1",
|
||||
"clsx": "^2.1.1",
|
||||
"csv-parse": "^5.5.0",
|
||||
"chart.js": "^4.4.9",
|
||||
"chartjs-plugin-annotation": "^3.1.0",
|
||||
"csv-parse": "^5.6.0",
|
||||
"d3": "^7.9.0",
|
||||
"d3-cloud": "^1.2.7",
|
||||
"d3-selection": "^3.0.0",
|
||||
"i18n-iso-countries": "^7.14.0",
|
||||
"iso-639-1": "^3.1.5",
|
||||
"leaflet": "^1.9.4",
|
||||
"lucide-react": "^0.525.0",
|
||||
"next": "^15.3.2",
|
||||
"next": "^15.3.3",
|
||||
"next-auth": "^4.24.11",
|
||||
"node-cron": "^4.0.7",
|
||||
"node-cron": "^4.1.0",
|
||||
"node-fetch": "^3.3.2",
|
||||
"react": "^19.1.0",
|
||||
"react-chartjs-2": "^5.3.0",
|
||||
"react-dom": "^19.1.0",
|
||||
"react-leaflet": "^5.0.0",
|
||||
"react-markdown": "^10.1.0",
|
||||
"recharts": "^3.0.2",
|
||||
"rehype-raw": "^7.0.0",
|
||||
"tailwind-merge": "^3.3.1"
|
||||
"rehype-raw": "^7.0.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@eslint/eslintrc": "^3.3.1",
|
||||
"@eslint/js": "^9.27.0",
|
||||
"@eslint/js": "^9.28.0",
|
||||
"@playwright/test": "^1.52.0",
|
||||
"@tailwindcss/postcss": "^4.1.11",
|
||||
"@testing-library/dom": "^10.4.0",
|
||||
"@testing-library/react": "^16.3.0",
|
||||
"@types/bcryptjs": "^2.4.2",
|
||||
"@types/node": "^22.15.21",
|
||||
"@types/node-cron": "^3.0.8",
|
||||
"@types/react": "^19.1.5",
|
||||
"@tailwindcss/postcss": "^4.1.8",
|
||||
"@types/bcryptjs": "^2.4.6",
|
||||
"@types/node": "^22.15.29",
|
||||
"@types/node-cron": "^3.0.11",
|
||||
"@types/react": "^19.1.6",
|
||||
"@types/react-dom": "^19.1.5",
|
||||
"@typescript-eslint/eslint-plugin": "^8.32.1",
|
||||
"@typescript-eslint/parser": "^8.32.1",
|
||||
"@vitejs/plugin-react": "^4.6.0",
|
||||
"@vitest/coverage-v8": "^3.2.4",
|
||||
"eslint": "^9.27.0",
|
||||
"eslint-config-next": "^15.3.2",
|
||||
"eslint-plugin-prettier": "^5.4.0",
|
||||
"jsdom": "^26.1.0",
|
||||
"@typescript-eslint/eslint-plugin": "^8.33.0",
|
||||
"@typescript-eslint/parser": "^8.33.0",
|
||||
"eslint": "^9.28.0",
|
||||
"eslint-config-next": "^15.3.3",
|
||||
"eslint-plugin-prettier": "^5.4.1",
|
||||
"markdownlint-cli2": "^0.18.1",
|
||||
"postcss": "^8.5.3",
|
||||
"postcss": "^8.5.4",
|
||||
"prettier": "^3.5.3",
|
||||
"prettier-plugin-jinja-template": "^2.1.0",
|
||||
"prisma": "^6.10.1",
|
||||
"tailwindcss": "^4.1.11",
|
||||
"prisma": "^6.8.2",
|
||||
"tailwindcss": "^4.1.8",
|
||||
"ts-node": "^10.9.2",
|
||||
"tsx": "^4.20.3",
|
||||
"tw-animate-css": "^1.3.4",
|
||||
"typescript": "^5.0.0",
|
||||
"vite-tsconfig-paths": "^5.1.4",
|
||||
"vitest": "^3.2.4"
|
||||
"typescript": "^5.8.3",
|
||||
"wrangler": "4.18.0"
|
||||
},
|
||||
"prettier": {
|
||||
"bracketSpacing": true,
|
||||
@ -143,5 +140,21 @@
|
||||
"*.json"
|
||||
]
|
||||
},
|
||||
"packageManager": "pnpm@10.12.4"
|
||||
"cloudflare": {
|
||||
"label": "Worker + D1 Database",
|
||||
"products": [
|
||||
"Workers",
|
||||
"D1"
|
||||
],
|
||||
"categories": [
|
||||
"storage"
|
||||
],
|
||||
"icon_urls": [
|
||||
"https://imagedelivery.net/wSMYJvS3Xw-n339CbDyDIA/c6fc5da3-1e0a-4608-b2f1-9628577ec800/public",
|
||||
"https://imagedelivery.net/wSMYJvS3Xw-n339CbDyDIA/5ca0ca32-e897-4699-d4c1-6b680512f000/public"
|
||||
],
|
||||
"docs_url": "https://developers.cloudflare.com/d1/",
|
||||
"preview_image_url": "https://imagedelivery.net/wSMYJvS3Xw-n339CbDyDIA/cb7cb0a9-6102-4822-633c-b76b7bb25900/public",
|
||||
"publish": true
|
||||
}
|
||||
}
|
||||
|
||||
174
pages/api/admin/refresh-sessions.ts
Normal file
174
pages/api/admin/refresh-sessions.ts
Normal file
@ -0,0 +1,174 @@
|
||||
// API route to refresh (fetch+parse+update) session data for a company
|
||||
import { NextApiRequest, NextApiResponse } from "next";
|
||||
import { fetchAndParseCsv } from "../../../lib/csvFetcher";
|
||||
import { prisma } from "../../../lib/prisma";
|
||||
|
||||
interface SessionCreateData {
|
||||
id: string;
|
||||
startTime: Date;
|
||||
companyId: string;
|
||||
sessionId?: string;
|
||||
[key: string]: unknown;
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetches transcript content from a URL with optional authentication
|
||||
* @param url The URL to fetch the transcript from
|
||||
* @param username Optional username for Basic Auth
|
||||
* @param password Optional password for Basic Auth
|
||||
* @returns The transcript content or null if fetching fails
|
||||
*/
|
||||
async function fetchTranscriptContent(
|
||||
url: string,
|
||||
username?: string,
|
||||
password?: string
|
||||
): Promise<string | null> {
|
||||
try {
|
||||
const authHeader =
|
||||
username && password
|
||||
? "Basic " + Buffer.from(`${username}:${password}`).toString("base64")
|
||||
: undefined;
|
||||
|
||||
const response = await fetch(url, {
|
||||
headers: authHeader ? { Authorization: authHeader } : {},
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
process.stderr.write(
|
||||
`Error fetching transcript: ${response.statusText}\n`
|
||||
);
|
||||
return null;
|
||||
}
|
||||
return await response.text();
|
||||
} catch (error) {
|
||||
process.stderr.write(`Failed to fetch transcript: ${error}\n`);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
export default async function handler(
|
||||
req: NextApiRequest,
|
||||
res: NextApiResponse
|
||||
) {
|
||||
// Check if this is a POST request
|
||||
if (req.method !== "POST") {
|
||||
return res.status(405).json({ error: "Method not allowed" });
|
||||
}
|
||||
|
||||
// Get companyId from body or query
|
||||
let { companyId } = req.body;
|
||||
|
||||
if (!companyId) {
|
||||
// Try to get user from prisma based on session cookie
|
||||
try {
|
||||
const session = await prisma.session.findFirst({
|
||||
orderBy: { createdAt: "desc" },
|
||||
where: {
|
||||
/* Add session check criteria here */
|
||||
},
|
||||
});
|
||||
|
||||
if (session) {
|
||||
companyId = session.companyId;
|
||||
}
|
||||
} catch (error) {
|
||||
// Log error for server-side debugging
|
||||
const errorMessage =
|
||||
error instanceof Error ? error.message : String(error);
|
||||
// Use a server-side logging approach instead of console
|
||||
process.stderr.write(`Error fetching session: ${errorMessage}\n`);
|
||||
}
|
||||
}
|
||||
|
||||
if (!companyId) {
|
||||
return res.status(400).json({ error: "Company ID is required" });
|
||||
}
|
||||
|
||||
const company = await prisma.company.findUnique({ where: { id: companyId } });
|
||||
if (!company) return res.status(404).json({ error: "Company not found" });
|
||||
|
||||
try {
|
||||
const sessions = await fetchAndParseCsv(
|
||||
company.csvUrl,
|
||||
company.csvUsername as string | undefined,
|
||||
company.csvPassword as string | undefined
|
||||
);
|
||||
|
||||
// Replace all session rows for this company (for demo simplicity)
|
||||
await prisma.session.deleteMany({ where: { companyId: company.id } });
|
||||
|
||||
for (const session of sessions) {
|
||||
const sessionData: SessionCreateData = {
|
||||
...session,
|
||||
companyId: company.id,
|
||||
id:
|
||||
session.id ||
|
||||
session.sessionId ||
|
||||
`sess_${Date.now()}_${Math.random().toString(36).substring(2, 7)}`,
|
||||
// Ensure startTime is not undefined
|
||||
startTime: session.startTime || new Date(),
|
||||
};
|
||||
|
||||
// Validate dates to prevent "Invalid Date" errors
|
||||
const startTime =
|
||||
sessionData.startTime instanceof Date &&
|
||||
!isNaN(sessionData.startTime.getTime())
|
||||
? sessionData.startTime
|
||||
: new Date();
|
||||
|
||||
const endTime =
|
||||
session.endTime instanceof Date && !isNaN(session.endTime.getTime())
|
||||
? session.endTime
|
||||
: new Date();
|
||||
|
||||
// Fetch transcript content if URL is available
|
||||
let transcriptContent: string | null = null;
|
||||
if (session.fullTranscriptUrl) {
|
||||
transcriptContent = await fetchTranscriptContent(
|
||||
session.fullTranscriptUrl,
|
||||
company.csvUsername as string | undefined,
|
||||
company.csvPassword as string | undefined
|
||||
);
|
||||
}
|
||||
|
||||
// Only include fields that are properly typed for Prisma
|
||||
await prisma.session.create({
|
||||
data: {
|
||||
id: sessionData.id,
|
||||
companyId: sessionData.companyId,
|
||||
startTime: startTime,
|
||||
endTime: endTime,
|
||||
ipAddress: session.ipAddress || null,
|
||||
country: session.country || null,
|
||||
language: session.language || null,
|
||||
messagesSent:
|
||||
typeof session.messagesSent === "number" ? session.messagesSent : 0,
|
||||
sentiment:
|
||||
typeof session.sentiment === "number" ? session.sentiment : null,
|
||||
escalated:
|
||||
typeof session.escalated === "boolean" ? session.escalated : null,
|
||||
forwardedHr:
|
||||
typeof session.forwardedHr === "boolean"
|
||||
? session.forwardedHr
|
||||
: null,
|
||||
fullTranscriptUrl: session.fullTranscriptUrl || null,
|
||||
transcriptContent: transcriptContent, // Add the transcript content
|
||||
avgResponseTime:
|
||||
typeof session.avgResponseTime === "number"
|
||||
? session.avgResponseTime
|
||||
: null,
|
||||
tokens: typeof session.tokens === "number" ? session.tokens : null,
|
||||
tokensEur:
|
||||
typeof session.tokensEur === "number" ? session.tokensEur : null,
|
||||
category: session.category || null,
|
||||
initialMsg: session.initialMsg || null,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
res.json({ ok: true, imported: sessions.length });
|
||||
} catch (e) {
|
||||
const error = e instanceof Error ? e.message : "An unknown error occurred";
|
||||
res.status(500).json({ error });
|
||||
}
|
||||
}
|
||||
@ -1,6 +1,6 @@
|
||||
import NextAuth, { NextAuthOptions } from "next-auth";
|
||||
import CredentialsProvider from "next-auth/providers/credentials";
|
||||
import { prisma } from "../../../../lib/prisma";
|
||||
import { prisma } from "../../../lib/prisma";
|
||||
import bcrypt from "bcryptjs";
|
||||
|
||||
// Define the shape of the JWT token
|
||||
@ -101,6 +101,4 @@ export const authOptions: NextAuthOptions = {
|
||||
debug: process.env.NODE_ENV === "development",
|
||||
};
|
||||
|
||||
const handler = NextAuth(authOptions);
|
||||
|
||||
export { handler as GET, handler as POST };
|
||||
export default NextAuth(authOptions);
|
||||
36
pages/api/dashboard/config.ts
Normal file
36
pages/api/dashboard/config.ts
Normal file
@ -0,0 +1,36 @@
|
||||
// API endpoint: update company CSV URL config
|
||||
import { NextApiRequest, NextApiResponse } from "next";
|
||||
import { getServerSession } from "next-auth";
|
||||
import { prisma } from "../../../lib/prisma";
|
||||
import { authOptions } from "../auth/[...nextauth]";
|
||||
|
||||
export default async function handler(
|
||||
req: NextApiRequest,
|
||||
res: NextApiResponse
|
||||
) {
|
||||
const session = await getServerSession(req, res, authOptions);
|
||||
if (!session?.user) return res.status(401).json({ error: "Not logged in" });
|
||||
|
||||
const user = await prisma.user.findUnique({
|
||||
where: { email: session.user.email as string },
|
||||
});
|
||||
|
||||
if (!user) return res.status(401).json({ error: "No user" });
|
||||
|
||||
if (req.method === "POST") {
|
||||
const { csvUrl } = req.body;
|
||||
await prisma.company.update({
|
||||
where: { id: user.companyId },
|
||||
data: { csvUrl },
|
||||
});
|
||||
res.json({ ok: true });
|
||||
} else if (req.method === "GET") {
|
||||
// Get company data
|
||||
const company = await prisma.company.findUnique({
|
||||
where: { id: user.companyId },
|
||||
});
|
||||
res.json({ company });
|
||||
} else {
|
||||
res.status(405).end();
|
||||
}
|
||||
}
|
||||
83
pages/api/dashboard/metrics.ts
Normal file
83
pages/api/dashboard/metrics.ts
Normal file
@ -0,0 +1,83 @@
|
||||
// API endpoint: return metrics for current company
|
||||
import { NextApiRequest, NextApiResponse } from "next";
|
||||
import { getServerSession } from "next-auth";
|
||||
import { prisma } from "../../../lib/prisma";
|
||||
import { sessionMetrics } from "../../../lib/metrics";
|
||||
import { authOptions } from "../auth/[...nextauth]";
|
||||
import { ChatSession } from "../../../lib/types"; // Import ChatSession
|
||||
|
||||
interface SessionUser {
|
||||
email: string;
|
||||
name?: string;
|
||||
}
|
||||
|
||||
interface SessionData {
|
||||
user: SessionUser;
|
||||
}
|
||||
|
||||
export default async function handler(
|
||||
req: NextApiRequest,
|
||||
res: NextApiResponse
|
||||
) {
|
||||
const session = (await getServerSession(
|
||||
req,
|
||||
res,
|
||||
authOptions
|
||||
)) as SessionData | null;
|
||||
if (!session?.user) return res.status(401).json({ error: "Not logged in" });
|
||||
|
||||
const user = await prisma.user.findUnique({
|
||||
where: { email: session.user.email },
|
||||
include: { company: true },
|
||||
});
|
||||
|
||||
if (!user) return res.status(401).json({ error: "No user" });
|
||||
|
||||
const prismaSessions = await prisma.session.findMany({
|
||||
where: { companyId: user.companyId },
|
||||
});
|
||||
|
||||
// Convert Prisma sessions to ChatSession[] type for sessionMetrics
|
||||
const chatSessions: ChatSession[] = prismaSessions.map((ps) => ({
|
||||
id: ps.id, // Map Prisma's id to ChatSession.id
|
||||
sessionId: ps.id, // Map Prisma's id to ChatSession.sessionId
|
||||
companyId: ps.companyId,
|
||||
startTime: new Date(ps.startTime), // Ensure startTime is a Date object
|
||||
endTime: ps.endTime ? new Date(ps.endTime) : null, // Ensure endTime is a Date object or null
|
||||
transcriptContent: ps.transcriptContent || "", // Ensure transcriptContent is a string
|
||||
createdAt: new Date(ps.createdAt), // Map Prisma's createdAt
|
||||
updatedAt: new Date(ps.createdAt), // Use createdAt for updatedAt as Session model doesn't have updatedAt
|
||||
category: ps.category || undefined,
|
||||
language: ps.language || undefined,
|
||||
country: ps.country || undefined,
|
||||
ipAddress: ps.ipAddress || undefined,
|
||||
sentiment: ps.sentiment === null ? undefined : ps.sentiment,
|
||||
messagesSent: ps.messagesSent === null ? undefined : ps.messagesSent, // Handle null messagesSent
|
||||
avgResponseTime:
|
||||
ps.avgResponseTime === null ? undefined : ps.avgResponseTime,
|
||||
tokens: ps.tokens === null ? undefined : ps.tokens,
|
||||
tokensEur: ps.tokensEur === null ? undefined : ps.tokensEur,
|
||||
escalated: ps.escalated || false,
|
||||
forwardedHr: ps.forwardedHr || false,
|
||||
initialMsg: ps.initialMsg || undefined,
|
||||
fullTranscriptUrl: ps.fullTranscriptUrl || undefined,
|
||||
// userId is missing in Prisma Session model, assuming it's not strictly needed for metrics or can be null
|
||||
userId: undefined, // Or some other default/mapping if available
|
||||
}));
|
||||
|
||||
// Pass company config to metrics
|
||||
const companyConfigForMetrics = {
|
||||
sentimentAlert:
|
||||
user.company.sentimentAlert === null
|
||||
? undefined
|
||||
: user.company.sentimentAlert,
|
||||
};
|
||||
|
||||
const metrics = sessionMetrics(chatSessions, companyConfigForMetrics);
|
||||
|
||||
res.json({
|
||||
metrics,
|
||||
csvUrl: user.company.csvUrl,
|
||||
company: user.company,
|
||||
});
|
||||
}
|
||||
@ -1,14 +1,23 @@
|
||||
import { NextRequest, NextResponse } from "next/server";
|
||||
import { NextApiRequest, NextApiResponse } from "next";
|
||||
import { getServerSession } from "next-auth/next";
|
||||
import { authOptions } from "../../auth/[...nextauth]/route";
|
||||
import { prisma } from "../../../../lib/prisma";
|
||||
import { SessionFilterOptions } from "../../../../lib/types";
|
||||
import { authOptions } from "../auth/[...nextauth]";
|
||||
import { prisma } from "../../../lib/prisma";
|
||||
import { SessionFilterOptions } from "../../../lib/types";
|
||||
|
||||
export async function GET(request: NextRequest) {
|
||||
const authSession = await getServerSession(authOptions);
|
||||
export default async function handler(
|
||||
req: NextApiRequest,
|
||||
res: NextApiResponse<
|
||||
SessionFilterOptions | { error: string; details?: string }
|
||||
>
|
||||
) {
|
||||
if (req.method !== "GET") {
|
||||
return res.status(405).json({ error: "Method not allowed" });
|
||||
}
|
||||
|
||||
const authSession = await getServerSession(req, res, authOptions);
|
||||
|
||||
if (!authSession || !authSession.user?.companyId) {
|
||||
return NextResponse.json({ error: "Unauthorized" }, { status: 401 });
|
||||
return res.status(401).json({ error: "Unauthorized" });
|
||||
}
|
||||
|
||||
const companyId = authSession.user.companyId;
|
||||
@ -53,19 +62,15 @@ export async function GET(request: NextRequest) {
|
||||
.map((s) => s.language)
|
||||
.filter(Boolean) as string[]; // Filter out any nulls and assert as string[]
|
||||
|
||||
return NextResponse.json({
|
||||
categories: distinctCategories,
|
||||
languages: distinctLanguages
|
||||
});
|
||||
return res
|
||||
.status(200)
|
||||
.json({ categories: distinctCategories, languages: distinctLanguages });
|
||||
} catch (error) {
|
||||
const errorMessage =
|
||||
error instanceof Error ? error.message : "An unknown error occurred";
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: "Failed to fetch filter options",
|
||||
details: errorMessage,
|
||||
},
|
||||
{ status: 500 }
|
||||
);
|
||||
return res.status(500).json({
|
||||
error: "Failed to fetch filter options",
|
||||
details: errorMessage,
|
||||
});
|
||||
}
|
||||
}
|
||||
@ -1,35 +1,28 @@
|
||||
import { NextRequest, NextResponse } from "next/server";
|
||||
import { prisma } from "../../../../../lib/prisma";
|
||||
import { ChatSession } from "../../../../../lib/types";
|
||||
import { NextApiRequest, NextApiResponse } from "next";
|
||||
import { prisma } from "../../../../lib/prisma";
|
||||
import { ChatSession } from "../../../../lib/types";
|
||||
|
||||
export async function GET(
|
||||
request: NextRequest,
|
||||
{ params }: { params: { id: string } }
|
||||
export default async function handler(
|
||||
req: NextApiRequest,
|
||||
res: NextApiResponse
|
||||
) {
|
||||
const { id } = params;
|
||||
if (req.method !== "GET") {
|
||||
return res.status(405).json({ error: "Method not allowed" });
|
||||
}
|
||||
|
||||
if (!id) {
|
||||
return NextResponse.json(
|
||||
{ error: "Session ID is required" },
|
||||
{ status: 400 }
|
||||
);
|
||||
const { id } = req.query;
|
||||
|
||||
if (!id || typeof id !== "string") {
|
||||
return res.status(400).json({ error: "Session ID is required" });
|
||||
}
|
||||
|
||||
try {
|
||||
const prismaSession = await prisma.session.findUnique({
|
||||
where: { id },
|
||||
include: {
|
||||
messages: {
|
||||
orderBy: { order: "asc" },
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
if (!prismaSession) {
|
||||
return NextResponse.json(
|
||||
{ error: "Session not found" },
|
||||
{ status: 404 }
|
||||
);
|
||||
return res.status(404).json({ error: "Session not found" });
|
||||
}
|
||||
|
||||
// Map Prisma session object to ChatSession type
|
||||
@ -57,29 +50,19 @@ export async function GET(
|
||||
avgResponseTime: prismaSession.avgResponseTime ?? null,
|
||||
escalated: prismaSession.escalated ?? undefined,
|
||||
forwardedHr: prismaSession.forwardedHr ?? undefined,
|
||||
tokens: prismaSession.tokens ?? undefined,
|
||||
tokensEur: prismaSession.tokensEur ?? undefined,
|
||||
initialMsg: prismaSession.initialMsg ?? undefined,
|
||||
fullTranscriptUrl: prismaSession.fullTranscriptUrl ?? null,
|
||||
summary: prismaSession.summary ?? null, // New field
|
||||
transcriptContent: null, // Not available in Session model
|
||||
messages:
|
||||
prismaSession.messages?.map((msg) => ({
|
||||
id: msg.id,
|
||||
sessionId: msg.sessionId,
|
||||
timestamp: msg.timestamp ? new Date(msg.timestamp) : new Date(),
|
||||
role: msg.role,
|
||||
content: msg.content,
|
||||
order: msg.order,
|
||||
createdAt: new Date(msg.createdAt),
|
||||
})) ?? [], // New field - parsed messages
|
||||
transcriptContent: prismaSession.transcriptContent ?? null,
|
||||
};
|
||||
|
||||
return NextResponse.json({ session });
|
||||
return res.status(200).json({ session });
|
||||
} catch (error) {
|
||||
const errorMessage =
|
||||
error instanceof Error ? error.message : "An unknown error occurred";
|
||||
return NextResponse.json(
|
||||
{ error: "Failed to fetch session", details: errorMessage },
|
||||
{ status: 500 }
|
||||
);
|
||||
return res
|
||||
.status(500)
|
||||
.json({ error: "Failed to fetch session", details: errorMessage });
|
||||
}
|
||||
}
|
||||
@ -1,33 +1,40 @@
|
||||
import { NextRequest, NextResponse } from "next/server";
|
||||
import { NextApiRequest, NextApiResponse } from "next";
|
||||
import { getServerSession } from "next-auth/next";
|
||||
import { authOptions } from "../../auth/[...nextauth]/route";
|
||||
import { prisma } from "../../../../lib/prisma";
|
||||
import { authOptions } from "../auth/[...nextauth]";
|
||||
import { prisma } from "../../../lib/prisma";
|
||||
import {
|
||||
ChatSession,
|
||||
SessionApiResponse,
|
||||
SessionQuery,
|
||||
} from "../../../../lib/types";
|
||||
} from "../../../lib/types";
|
||||
import { Prisma } from "@prisma/client";
|
||||
|
||||
export async function GET(request: NextRequest) {
|
||||
const authSession = await getServerSession(authOptions);
|
||||
export default async function handler(
|
||||
req: NextApiRequest,
|
||||
res: NextApiResponse<SessionApiResponse | { error: string; details?: string }>
|
||||
) {
|
||||
if (req.method !== "GET") {
|
||||
return res.status(405).json({ error: "Method not allowed" });
|
||||
}
|
||||
|
||||
const authSession = await getServerSession(req, res, authOptions);
|
||||
|
||||
if (!authSession || !authSession.user?.companyId) {
|
||||
return NextResponse.json({ error: "Unauthorized" }, { status: 401 });
|
||||
return res.status(401).json({ error: "Unauthorized" });
|
||||
}
|
||||
|
||||
const companyId = authSession.user.companyId;
|
||||
const { searchParams } = new URL(request.url);
|
||||
|
||||
const searchTerm = searchParams.get("searchTerm");
|
||||
const category = searchParams.get("category");
|
||||
const language = searchParams.get("language");
|
||||
const startDate = searchParams.get("startDate");
|
||||
const endDate = searchParams.get("endDate");
|
||||
const sortKey = searchParams.get("sortKey");
|
||||
const sortOrder = searchParams.get("sortOrder");
|
||||
const queryPage = searchParams.get("page");
|
||||
const queryPageSize = searchParams.get("pageSize");
|
||||
const {
|
||||
searchTerm,
|
||||
category,
|
||||
language,
|
||||
startDate,
|
||||
endDate,
|
||||
sortKey,
|
||||
sortOrder,
|
||||
page: queryPage,
|
||||
pageSize: queryPageSize,
|
||||
} = req.query as SessionQuery;
|
||||
|
||||
const page = Number(queryPage) || 1;
|
||||
const pageSize = Number(queryPageSize) || 10;
|
||||
@ -36,34 +43,38 @@ export async function GET(request: NextRequest) {
|
||||
const whereClause: Prisma.SessionWhereInput = { companyId };
|
||||
|
||||
// Search Term
|
||||
if (searchTerm && searchTerm.trim() !== "") {
|
||||
if (
|
||||
searchTerm &&
|
||||
typeof searchTerm === "string" &&
|
||||
searchTerm.trim() !== ""
|
||||
) {
|
||||
const searchConditions = [
|
||||
{ id: { contains: searchTerm } },
|
||||
{ category: { contains: searchTerm } },
|
||||
{ initialMsg: { contains: searchTerm } },
|
||||
{ summary: { contains: searchTerm } },
|
||||
{ transcriptContent: { contains: searchTerm } },
|
||||
];
|
||||
whereClause.OR = searchConditions;
|
||||
}
|
||||
|
||||
// Category Filter
|
||||
if (category && category.trim() !== "") {
|
||||
// Cast to SessionCategory enum if it's a valid value
|
||||
whereClause.category = category as any;
|
||||
if (category && typeof category === "string" && category.trim() !== "") {
|
||||
whereClause.category = category;
|
||||
}
|
||||
|
||||
// Language Filter
|
||||
if (language && language.trim() !== "") {
|
||||
if (language && typeof language === "string" && language.trim() !== "") {
|
||||
whereClause.language = language;
|
||||
}
|
||||
|
||||
// Date Range Filter
|
||||
if (startDate) {
|
||||
if (startDate && typeof startDate === "string") {
|
||||
whereClause.startTime = {
|
||||
...((whereClause.startTime as object) || {}),
|
||||
gte: new Date(startDate),
|
||||
};
|
||||
}
|
||||
if (endDate) {
|
||||
if (endDate && typeof endDate === "string") {
|
||||
const inclusiveEndDate = new Date(endDate);
|
||||
inclusiveEndDate.setDate(inclusiveEndDate.getDate() + 1);
|
||||
whereClause.startTime = {
|
||||
@ -87,7 +98,7 @@ export async function GET(request: NextRequest) {
|
||||
| Prisma.SessionOrderByWithRelationInput[];
|
||||
|
||||
const primarySortField =
|
||||
sortKey && validSortKeys[sortKey]
|
||||
sortKey && typeof sortKey === "string" && validSortKeys[sortKey]
|
||||
? validSortKeys[sortKey]
|
||||
: "startTime"; // Default to startTime field if sortKey is invalid/missing
|
||||
|
||||
@ -104,6 +115,9 @@ export async function GET(request: NextRequest) {
|
||||
{ startTime: "desc" },
|
||||
];
|
||||
}
|
||||
// Note: If sortKey was initially undefined or invalid, primarySortField defaults to "startTime",
|
||||
// and primarySortOrder defaults to "desc". This makes orderByCondition = { startTime: "desc" },
|
||||
// which is the correct overall default sort.
|
||||
|
||||
const prismaSessions = await prisma.session.findMany({
|
||||
where: whereClause,
|
||||
@ -132,18 +146,19 @@ export async function GET(request: NextRequest) {
|
||||
avgResponseTime: ps.avgResponseTime ?? null,
|
||||
escalated: ps.escalated ?? undefined,
|
||||
forwardedHr: ps.forwardedHr ?? undefined,
|
||||
tokens: ps.tokens ?? undefined,
|
||||
tokensEur: ps.tokensEur ?? undefined,
|
||||
initialMsg: ps.initialMsg ?? undefined,
|
||||
fullTranscriptUrl: ps.fullTranscriptUrl ?? null,
|
||||
transcriptContent: null, // Transcript content is now fetched from fullTranscriptUrl when needed
|
||||
transcriptContent: ps.transcriptContent ?? null,
|
||||
}));
|
||||
|
||||
return NextResponse.json({ sessions, totalSessions });
|
||||
return res.status(200).json({ sessions, totalSessions });
|
||||
} catch (error) {
|
||||
const errorMessage =
|
||||
error instanceof Error ? error.message : "An unknown error occurred";
|
||||
return NextResponse.json(
|
||||
{ error: "Failed to fetch sessions", details: errorMessage },
|
||||
{ status: 500 }
|
||||
);
|
||||
return res
|
||||
.status(500)
|
||||
.json({ error: "Failed to fetch sessions", details: errorMessage });
|
||||
}
|
||||
}
|
||||
37
pages/api/dashboard/settings.ts
Normal file
37
pages/api/dashboard/settings.ts
Normal file
@ -0,0 +1,37 @@
|
||||
import { NextApiRequest, NextApiResponse } from "next";
|
||||
import { getServerSession } from "next-auth";
|
||||
import { prisma } from "../../../lib/prisma";
|
||||
import { authOptions } from "../auth/[...nextauth]";
|
||||
|
||||
export default async function handler(
|
||||
req: NextApiRequest,
|
||||
res: NextApiResponse
|
||||
) {
|
||||
const session = await getServerSession(req, res, authOptions);
|
||||
if (!session?.user || session.user.role !== "admin")
|
||||
return res.status(403).json({ error: "Forbidden" });
|
||||
|
||||
const user = await prisma.user.findUnique({
|
||||
where: { email: session.user.email as string },
|
||||
});
|
||||
|
||||
if (!user) return res.status(401).json({ error: "No user" });
|
||||
|
||||
if (req.method === "POST") {
|
||||
const { csvUrl, csvUsername, csvPassword, sentimentThreshold } = req.body;
|
||||
await prisma.company.update({
|
||||
where: { id: user.companyId },
|
||||
data: {
|
||||
csvUrl,
|
||||
csvUsername,
|
||||
...(csvPassword ? { csvPassword } : {}),
|
||||
sentimentAlert: sentimentThreshold
|
||||
? parseFloat(sentimentThreshold)
|
||||
: null,
|
||||
},
|
||||
});
|
||||
res.json({ ok: true });
|
||||
} else {
|
||||
res.status(405).end();
|
||||
}
|
||||
}
|
||||
59
pages/api/dashboard/users.ts
Normal file
59
pages/api/dashboard/users.ts
Normal file
@ -0,0 +1,59 @@
|
||||
import { NextApiRequest, NextApiResponse } from "next";
|
||||
import crypto from "crypto";
|
||||
import { getServerSession } from "next-auth";
|
||||
import { prisma } from "../../../lib/prisma";
|
||||
import bcrypt from "bcryptjs";
|
||||
import { authOptions } from "../auth/[...nextauth]";
|
||||
// User type from prisma is used instead of the one in lib/types
|
||||
|
||||
interface UserBasicInfo {
|
||||
id: string;
|
||||
email: string;
|
||||
role: string;
|
||||
}
|
||||
|
||||
export default async function handler(
|
||||
req: NextApiRequest,
|
||||
res: NextApiResponse
|
||||
) {
|
||||
const session = await getServerSession(req, res, authOptions);
|
||||
if (!session?.user || session.user.role !== "admin")
|
||||
return res.status(403).json({ error: "Forbidden" });
|
||||
|
||||
const user = await prisma.user.findUnique({
|
||||
where: { email: session.user.email as string },
|
||||
});
|
||||
|
||||
if (!user) return res.status(401).json({ error: "No user" });
|
||||
|
||||
if (req.method === "GET") {
|
||||
const users = await prisma.user.findMany({
|
||||
where: { companyId: user.companyId },
|
||||
});
|
||||
|
||||
const mappedUsers: UserBasicInfo[] = users.map((u) => ({
|
||||
id: u.id,
|
||||
email: u.email,
|
||||
role: u.role,
|
||||
}));
|
||||
|
||||
res.json({ users: mappedUsers });
|
||||
} else if (req.method === "POST") {
|
||||
const { email, role } = req.body;
|
||||
if (!email || !role)
|
||||
return res.status(400).json({ error: "Missing fields" });
|
||||
const exists = await prisma.user.findUnique({ where: { email } });
|
||||
if (exists) return res.status(409).json({ error: "Email exists" });
|
||||
const tempPassword = crypto.randomBytes(12).toString("base64").slice(0, 12); // secure random initial password
|
||||
await prisma.user.create({
|
||||
data: {
|
||||
email,
|
||||
password: await bcrypt.hash(tempPassword, 10),
|
||||
companyId: user.companyId,
|
||||
role,
|
||||
},
|
||||
});
|
||||
// TODO: Email user their temp password (stub, for demo) - Implement a robust and secure email sending mechanism. Consider using a transactional email service.
|
||||
res.json({ ok: true, tempPassword });
|
||||
} else res.status(405).end();
|
||||
}
|
||||
31
pages/api/forgot-password.ts
Normal file
31
pages/api/forgot-password.ts
Normal file
@ -0,0 +1,31 @@
|
||||
import { prisma } from "../../lib/prisma";
|
||||
import { sendEmail } from "../../lib/sendEmail";
|
||||
import crypto from "crypto";
|
||||
import type { NextApiRequest, NextApiResponse } from "next";
|
||||
|
||||
export default async function handler(
|
||||
req: NextApiRequest,
|
||||
res: NextApiResponse
|
||||
) {
|
||||
if (req.method !== "POST") {
|
||||
res.setHeader("Allow", ["POST"]);
|
||||
return res.status(405).end(`Method ${req.method} Not Allowed`);
|
||||
}
|
||||
|
||||
// Type the body with a type assertion
|
||||
const { email } = req.body as { email: string };
|
||||
|
||||
const user = await prisma.user.findUnique({ where: { email } });
|
||||
if (!user) return res.status(200).end(); // always 200 for privacy
|
||||
|
||||
const token = crypto.randomBytes(32).toString("hex");
|
||||
const expiry = new Date(Date.now() + 1000 * 60 * 30); // 30 min expiry
|
||||
await prisma.user.update({
|
||||
where: { email },
|
||||
data: { resetToken: token, resetTokenExpiry: expiry },
|
||||
});
|
||||
|
||||
const resetUrl = `${process.env.NEXTAUTH_URL || "http://localhost:3000"}/reset-password?token=${token}`;
|
||||
await sendEmail(email, "Password Reset", `Reset your password: ${resetUrl}`);
|
||||
res.status(200).end();
|
||||
}
|
||||
56
pages/api/register.ts
Normal file
56
pages/api/register.ts
Normal file
@ -0,0 +1,56 @@
|
||||
import { NextApiRequest, NextApiResponse } from "next";
|
||||
import { prisma } from "../../lib/prisma";
|
||||
import bcrypt from "bcryptjs";
|
||||
import { ApiResponse } from "../../lib/types";
|
||||
|
||||
interface RegisterRequestBody {
|
||||
email: string;
|
||||
password: string;
|
||||
company: string;
|
||||
csvUrl?: string;
|
||||
}
|
||||
|
||||
export default async function handler(
|
||||
req: NextApiRequest,
|
||||
res: NextApiResponse<ApiResponse<{ success: boolean } | { error: string }>>
|
||||
) {
|
||||
if (req.method !== "POST") return res.status(405).end();
|
||||
|
||||
const { email, password, company, csvUrl } = req.body as RegisterRequestBody;
|
||||
|
||||
if (!email || !password || !company) {
|
||||
return res.status(400).json({
|
||||
success: false,
|
||||
error: "Missing required fields",
|
||||
});
|
||||
}
|
||||
|
||||
// Check if email exists
|
||||
const exists = await prisma.user.findUnique({
|
||||
where: { email },
|
||||
});
|
||||
|
||||
if (exists) {
|
||||
return res.status(409).json({
|
||||
success: false,
|
||||
error: "Email already exists",
|
||||
});
|
||||
}
|
||||
|
||||
const newCompany = await prisma.company.create({
|
||||
data: { name: company, csvUrl: csvUrl || "" },
|
||||
});
|
||||
const hashed = await bcrypt.hash(password, 10);
|
||||
await prisma.user.create({
|
||||
data: {
|
||||
email,
|
||||
password: hashed,
|
||||
companyId: newCompany.id,
|
||||
role: "admin",
|
||||
},
|
||||
});
|
||||
res.status(201).json({
|
||||
success: true,
|
||||
data: { success: true },
|
||||
});
|
||||
}
|
||||
63
pages/api/reset-password.ts
Normal file
63
pages/api/reset-password.ts
Normal file
@ -0,0 +1,63 @@
|
||||
import { prisma } from "../../lib/prisma";
|
||||
import bcrypt from "bcryptjs";
|
||||
import type { NextApiRequest, NextApiResponse } from "next"; // Import official Next.js types
|
||||
|
||||
export default async function handler(
|
||||
req: NextApiRequest, // Use official NextApiRequest
|
||||
res: NextApiResponse // Use official NextApiResponse
|
||||
) {
|
||||
if (req.method !== "POST") {
|
||||
res.setHeader("Allow", ["POST"]); // Good practice to set Allow header for 405
|
||||
return res.status(405).end(`Method ${req.method} Not Allowed`);
|
||||
}
|
||||
|
||||
// It's good practice to explicitly type the expected body for clarity and safety
|
||||
const { token, password } = req.body as { token?: string; password?: string };
|
||||
|
||||
if (!token || !password) {
|
||||
return res.status(400).json({ error: "Token and password are required." });
|
||||
}
|
||||
|
||||
if (password.length < 8) {
|
||||
// Example: Add password complexity rule
|
||||
return res
|
||||
.status(400)
|
||||
.json({ error: "Password must be at least 8 characters long." });
|
||||
}
|
||||
|
||||
try {
|
||||
const user = await prisma.user.findFirst({
|
||||
where: {
|
||||
resetToken: token,
|
||||
resetTokenExpiry: { gte: new Date() },
|
||||
},
|
||||
});
|
||||
|
||||
if (!user) {
|
||||
return res.status(400).json({
|
||||
error: "Invalid or expired token. Please request a new password reset.",
|
||||
});
|
||||
}
|
||||
|
||||
const hash = await bcrypt.hash(password, 10);
|
||||
await prisma.user.update({
|
||||
where: { id: user.id },
|
||||
data: {
|
||||
password: hash,
|
||||
resetToken: null,
|
||||
resetTokenExpiry: null,
|
||||
},
|
||||
});
|
||||
|
||||
// Instead of just res.status(200).end(), send a success message
|
||||
return res
|
||||
.status(200)
|
||||
.json({ message: "Password has been reset successfully." });
|
||||
} catch (error) {
|
||||
console.error("Reset password error:", error); // Log the error for server-side debugging
|
||||
// Provide a generic error message to the client
|
||||
return res.status(500).json({
|
||||
error: "An internal server error occurred. Please try again later.",
|
||||
});
|
||||
}
|
||||
}
|
||||
4291
pnpm-lock.yaml
generated
4291
pnpm-lock.yaml
generated
File diff suppressed because it is too large
Load Diff
50
prisma/migrations/20250521191702_init/migration.sql
Normal file
50
prisma/migrations/20250521191702_init/migration.sql
Normal file
@ -0,0 +1,50 @@
|
||||
-- CreateTable
|
||||
CREATE TABLE "Company" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"name" TEXT NOT NULL,
|
||||
"csvUrl" TEXT NOT NULL,
|
||||
"csvUsername" TEXT,
|
||||
"csvPassword" TEXT,
|
||||
"sentimentAlert" REAL,
|
||||
"dashboardOpts" TEXT,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "User" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"email" TEXT NOT NULL,
|
||||
"password" TEXT NOT NULL,
|
||||
"companyId" TEXT NOT NULL,
|
||||
"role" TEXT NOT NULL,
|
||||
"resetToken" TEXT,
|
||||
"resetTokenExpiry" DATETIME,
|
||||
CONSTRAINT "User_companyId_fkey" FOREIGN KEY ("companyId") REFERENCES "Company" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "Session" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"companyId" TEXT NOT NULL,
|
||||
"startTime" DATETIME NOT NULL,
|
||||
"endTime" DATETIME NOT NULL,
|
||||
"ipAddress" TEXT,
|
||||
"country" TEXT,
|
||||
"language" TEXT,
|
||||
"messagesSent" INTEGER,
|
||||
"sentiment" REAL,
|
||||
"escalated" BOOLEAN,
|
||||
"forwardedHr" BOOLEAN,
|
||||
"fullTranscriptUrl" TEXT,
|
||||
"avgResponseTime" REAL,
|
||||
"tokens" INTEGER,
|
||||
"tokensEur" REAL,
|
||||
"category" TEXT,
|
||||
"initialMsg" TEXT,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
CONSTRAINT "Session_companyId_fkey" FOREIGN KEY ("companyId") REFERENCES "Company" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "User_email_key" ON "User"("email");
|
||||
@ -0,0 +1,2 @@
|
||||
-- AlterTable
|
||||
ALTER TABLE "Session" ADD COLUMN "transcriptContent" TEXT;
|
||||
@ -1,227 +0,0 @@
|
||||
-- CreateEnum
|
||||
CREATE TYPE "UserRole" AS ENUM ('ADMIN', 'USER', 'AUDITOR');
|
||||
|
||||
-- CreateEnum
|
||||
CREATE TYPE "SentimentCategory" AS ENUM ('POSITIVE', 'NEUTRAL', 'NEGATIVE');
|
||||
|
||||
-- CreateEnum
|
||||
CREATE TYPE "SessionCategory" AS ENUM ('SCHEDULE_HOURS', 'LEAVE_VACATION', 'SICK_LEAVE_RECOVERY', 'SALARY_COMPENSATION', 'CONTRACT_HOURS', 'ONBOARDING', 'OFFBOARDING', 'WORKWEAR_STAFF_PASS', 'TEAM_CONTACTS', 'PERSONAL_QUESTIONS', 'ACCESS_LOGIN', 'SOCIAL_QUESTIONS', 'UNRECOGNIZED_OTHER');
|
||||
|
||||
-- CreateEnum
|
||||
CREATE TYPE "ImportStatus" AS ENUM ('QUEUED', 'PROCESSING', 'DONE', 'ERROR');
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "Company" (
|
||||
"id" TEXT NOT NULL,
|
||||
"name" TEXT NOT NULL,
|
||||
"csvUrl" TEXT NOT NULL,
|
||||
"csvUsername" TEXT,
|
||||
"csvPassword" TEXT,
|
||||
"sentimentAlert" DOUBLE PRECISION,
|
||||
"dashboardOpts" JSONB,
|
||||
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" TIMESTAMP(3) NOT NULL,
|
||||
|
||||
CONSTRAINT "Company_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "User" (
|
||||
"id" TEXT NOT NULL,
|
||||
"email" TEXT NOT NULL,
|
||||
"password" TEXT NOT NULL,
|
||||
"role" "UserRole" NOT NULL DEFAULT 'USER',
|
||||
"companyId" TEXT NOT NULL,
|
||||
"resetToken" TEXT,
|
||||
"resetTokenExpiry" TIMESTAMP(3),
|
||||
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" TIMESTAMP(3) NOT NULL,
|
||||
|
||||
CONSTRAINT "User_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "Session" (
|
||||
"id" TEXT NOT NULL,
|
||||
"companyId" TEXT NOT NULL,
|
||||
"importId" TEXT,
|
||||
"startTime" TIMESTAMP(3) NOT NULL,
|
||||
"endTime" TIMESTAMP(3) NOT NULL,
|
||||
"ipAddress" TEXT,
|
||||
"country" TEXT,
|
||||
"fullTranscriptUrl" TEXT,
|
||||
"avgResponseTime" DOUBLE PRECISION,
|
||||
"initialMsg" TEXT,
|
||||
"language" TEXT,
|
||||
"messagesSent" INTEGER,
|
||||
"sentiment" "SentimentCategory",
|
||||
"escalated" BOOLEAN,
|
||||
"forwardedHr" BOOLEAN,
|
||||
"category" "SessionCategory",
|
||||
"summary" TEXT,
|
||||
"processed" BOOLEAN NOT NULL DEFAULT false,
|
||||
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" TIMESTAMP(3) NOT NULL,
|
||||
|
||||
CONSTRAINT "Session_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "SessionImport" (
|
||||
"id" TEXT NOT NULL,
|
||||
"companyId" TEXT NOT NULL,
|
||||
"externalSessionId" TEXT NOT NULL,
|
||||
"startTimeRaw" TEXT NOT NULL,
|
||||
"endTimeRaw" TEXT NOT NULL,
|
||||
"ipAddress" TEXT,
|
||||
"countryCode" TEXT,
|
||||
"language" TEXT,
|
||||
"messagesSent" INTEGER,
|
||||
"sentimentRaw" TEXT,
|
||||
"escalatedRaw" TEXT,
|
||||
"forwardedHrRaw" TEXT,
|
||||
"fullTranscriptUrl" TEXT,
|
||||
"avgResponseTimeSeconds" DOUBLE PRECISION,
|
||||
"tokens" INTEGER,
|
||||
"tokensEur" DOUBLE PRECISION,
|
||||
"category" TEXT,
|
||||
"initialMessage" TEXT,
|
||||
"rawTranscriptContent" TEXT,
|
||||
"status" "ImportStatus" NOT NULL DEFAULT 'QUEUED',
|
||||
"errorMsg" TEXT,
|
||||
"processedAt" TIMESTAMP(3),
|
||||
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
|
||||
CONSTRAINT "SessionImport_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "Message" (
|
||||
"id" TEXT NOT NULL,
|
||||
"sessionId" TEXT NOT NULL,
|
||||
"timestamp" TIMESTAMP(3),
|
||||
"role" TEXT NOT NULL,
|
||||
"content" TEXT NOT NULL,
|
||||
"order" INTEGER NOT NULL,
|
||||
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
|
||||
CONSTRAINT "Message_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "Question" (
|
||||
"id" TEXT NOT NULL,
|
||||
"content" TEXT NOT NULL,
|
||||
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
|
||||
CONSTRAINT "Question_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "SessionQuestion" (
|
||||
"id" TEXT NOT NULL,
|
||||
"sessionId" TEXT NOT NULL,
|
||||
"questionId" TEXT NOT NULL,
|
||||
"order" INTEGER NOT NULL,
|
||||
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
|
||||
CONSTRAINT "SessionQuestion_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "AIProcessingRequest" (
|
||||
"id" TEXT NOT NULL,
|
||||
"sessionId" TEXT NOT NULL,
|
||||
"openaiRequestId" TEXT,
|
||||
"model" TEXT NOT NULL,
|
||||
"serviceTier" TEXT,
|
||||
"systemFingerprint" TEXT,
|
||||
"promptTokens" INTEGER NOT NULL,
|
||||
"completionTokens" INTEGER NOT NULL,
|
||||
"totalTokens" INTEGER NOT NULL,
|
||||
"cachedTokens" INTEGER,
|
||||
"audioTokensPrompt" INTEGER,
|
||||
"reasoningTokens" INTEGER,
|
||||
"audioTokensCompletion" INTEGER,
|
||||
"acceptedPredictionTokens" INTEGER,
|
||||
"rejectedPredictionTokens" INTEGER,
|
||||
"promptTokenCost" DOUBLE PRECISION NOT NULL,
|
||||
"completionTokenCost" DOUBLE PRECISION NOT NULL,
|
||||
"totalCostEur" DOUBLE PRECISION NOT NULL,
|
||||
"processingType" TEXT NOT NULL,
|
||||
"success" BOOLEAN NOT NULL,
|
||||
"errorMessage" TEXT,
|
||||
"requestedAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"completedAt" TIMESTAMP(3),
|
||||
|
||||
CONSTRAINT "AIProcessingRequest_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "User_email_key" ON "User"("email");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "Session_importId_key" ON "Session"("importId");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "Session_companyId_startTime_idx" ON "Session"("companyId", "startTime");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "SessionImport_externalSessionId_key" ON "SessionImport"("externalSessionId");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "SessionImport_status_idx" ON "SessionImport"("status");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "SessionImport_companyId_externalSessionId_key" ON "SessionImport"("companyId", "externalSessionId");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "Message_sessionId_order_idx" ON "Message"("sessionId", "order");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "Message_sessionId_order_key" ON "Message"("sessionId", "order");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "Question_content_key" ON "Question"("content");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "SessionQuestion_sessionId_idx" ON "SessionQuestion"("sessionId");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "SessionQuestion_sessionId_questionId_key" ON "SessionQuestion"("sessionId", "questionId");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "SessionQuestion_sessionId_order_key" ON "SessionQuestion"("sessionId", "order");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "AIProcessingRequest_sessionId_idx" ON "AIProcessingRequest"("sessionId");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "AIProcessingRequest_requestedAt_idx" ON "AIProcessingRequest"("requestedAt");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "AIProcessingRequest_model_idx" ON "AIProcessingRequest"("model");
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "User" ADD CONSTRAINT "User_companyId_fkey" FOREIGN KEY ("companyId") REFERENCES "Company"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "Session" ADD CONSTRAINT "Session_companyId_fkey" FOREIGN KEY ("companyId") REFERENCES "Company"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "Session" ADD CONSTRAINT "Session_importId_fkey" FOREIGN KEY ("importId") REFERENCES "SessionImport"("id") ON DELETE SET NULL ON UPDATE CASCADE;
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "SessionImport" ADD CONSTRAINT "SessionImport_companyId_fkey" FOREIGN KEY ("companyId") REFERENCES "Company"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "Message" ADD CONSTRAINT "Message_sessionId_fkey" FOREIGN KEY ("sessionId") REFERENCES "Session"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "SessionQuestion" ADD CONSTRAINT "SessionQuestion_sessionId_fkey" FOREIGN KEY ("sessionId") REFERENCES "Session"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "SessionQuestion" ADD CONSTRAINT "SessionQuestion_questionId_fkey" FOREIGN KEY ("questionId") REFERENCES "Question"("id") ON DELETE RESTRICT ON UPDATE CASCADE;
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "AIProcessingRequest" ADD CONSTRAINT "AIProcessingRequest_sessionId_fkey" FOREIGN KEY ("sessionId") REFERENCES "Session"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
@ -1,63 +0,0 @@
|
||||
-- CreateTable
|
||||
CREATE TABLE "AIModel" (
|
||||
"id" TEXT NOT NULL,
|
||||
"name" TEXT NOT NULL,
|
||||
"provider" TEXT NOT NULL,
|
||||
"maxTokens" INTEGER,
|
||||
"isActive" BOOLEAN NOT NULL DEFAULT true,
|
||||
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" TIMESTAMP(3) NOT NULL,
|
||||
|
||||
CONSTRAINT "AIModel_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "AIModelPricing" (
|
||||
"id" TEXT NOT NULL,
|
||||
"aiModelId" TEXT NOT NULL,
|
||||
"promptTokenCost" DOUBLE PRECISION NOT NULL,
|
||||
"completionTokenCost" DOUBLE PRECISION NOT NULL,
|
||||
"effectiveFrom" TIMESTAMP(3) NOT NULL,
|
||||
"effectiveUntil" TIMESTAMP(3),
|
||||
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
|
||||
CONSTRAINT "AIModelPricing_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "CompanyAIModel" (
|
||||
"id" TEXT NOT NULL,
|
||||
"companyId" TEXT NOT NULL,
|
||||
"aiModelId" TEXT NOT NULL,
|
||||
"isDefault" BOOLEAN NOT NULL DEFAULT false,
|
||||
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
|
||||
CONSTRAINT "CompanyAIModel_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "AIModel_name_key" ON "AIModel"("name");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "AIModel_provider_isActive_idx" ON "AIModel"("provider", "isActive");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "AIModelPricing_aiModelId_effectiveFrom_idx" ON "AIModelPricing"("aiModelId", "effectiveFrom");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "AIModelPricing_effectiveFrom_effectiveUntil_idx" ON "AIModelPricing"("effectiveFrom", "effectiveUntil");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "CompanyAIModel_companyId_isDefault_idx" ON "CompanyAIModel"("companyId", "isDefault");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "CompanyAIModel_companyId_aiModelId_key" ON "CompanyAIModel"("companyId", "aiModelId");
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "AIModelPricing" ADD CONSTRAINT "AIModelPricing_aiModelId_fkey" FOREIGN KEY ("aiModelId") REFERENCES "AIModel"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "CompanyAIModel" ADD CONSTRAINT "CompanyAIModel_companyId_fkey" FOREIGN KEY ("companyId") REFERENCES "Company"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "CompanyAIModel" ADD CONSTRAINT "CompanyAIModel_aiModelId_fkey" FOREIGN KEY ("aiModelId") REFERENCES "AIModel"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
@ -1,3 +1,3 @@
|
||||
# Please do not edit this file manually
|
||||
# It should be added in your version-control system (e.g., Git)
|
||||
provider = "postgresql"
|
||||
provider = "sqlite"
|
||||
|
||||
@ -1,376 +1,58 @@
|
||||
// Database schema, one company = one org, linked to users and CSV config
|
||||
generator client {
|
||||
provider = "prisma-client-js"
|
||||
previewFeatures = ["driverAdapters"]
|
||||
}
|
||||
|
||||
datasource db {
|
||||
provider = "postgresql"
|
||||
url = env("DATABASE_URL")
|
||||
directUrl = env("DATABASE_URL_DIRECT")
|
||||
provider = "sqlite"
|
||||
url = env("DATABASE_URL")
|
||||
}
|
||||
|
||||
/**
|
||||
* ENUMS – fewer magic strings
|
||||
*/
|
||||
enum UserRole {
|
||||
ADMIN
|
||||
USER
|
||||
AUDITOR
|
||||
}
|
||||
|
||||
enum SentimentCategory {
|
||||
POSITIVE
|
||||
NEUTRAL
|
||||
NEGATIVE
|
||||
}
|
||||
|
||||
enum SessionCategory {
|
||||
SCHEDULE_HOURS
|
||||
LEAVE_VACATION
|
||||
SICK_LEAVE_RECOVERY
|
||||
SALARY_COMPENSATION
|
||||
CONTRACT_HOURS
|
||||
ONBOARDING
|
||||
OFFBOARDING
|
||||
WORKWEAR_STAFF_PASS
|
||||
TEAM_CONTACTS
|
||||
PERSONAL_QUESTIONS
|
||||
ACCESS_LOGIN
|
||||
SOCIAL_QUESTIONS
|
||||
UNRECOGNIZED_OTHER
|
||||
}
|
||||
|
||||
enum ProcessingStage {
|
||||
CSV_IMPORT // SessionImport created
|
||||
TRANSCRIPT_FETCH // Transcript content fetched
|
||||
SESSION_CREATION // Session + Messages created
|
||||
AI_ANALYSIS // AI processing completed
|
||||
QUESTION_EXTRACTION // Questions extracted
|
||||
}
|
||||
|
||||
enum ProcessingStatus {
|
||||
PENDING
|
||||
IN_PROGRESS
|
||||
COMPLETED
|
||||
FAILED
|
||||
SKIPPED
|
||||
}
|
||||
|
||||
/**
|
||||
* COMPANY (multi-tenant root)
|
||||
*/
|
||||
model Company {
|
||||
id String @id @default(uuid())
|
||||
name String
|
||||
csvUrl String
|
||||
csvUsername String?
|
||||
csvPassword String?
|
||||
sentimentAlert Float?
|
||||
dashboardOpts Json? // JSON column instead of opaque string
|
||||
|
||||
users User[] @relation("CompanyUsers")
|
||||
sessions Session[]
|
||||
imports SessionImport[]
|
||||
companyAiModels CompanyAIModel[]
|
||||
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
id String @id @default(uuid())
|
||||
name String
|
||||
csvUrl String // where to fetch CSV
|
||||
csvUsername String? // for basic auth
|
||||
csvPassword String?
|
||||
sentimentAlert Float? // e.g. alert threshold for negative chats
|
||||
dashboardOpts String? // JSON blob for per-company dashboard preferences
|
||||
users User[]
|
||||
sessions Session[]
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
}
|
||||
|
||||
/**
|
||||
* USER (auth accounts)
|
||||
*/
|
||||
model User {
|
||||
id String @id @default(uuid())
|
||||
email String @unique
|
||||
password String
|
||||
role UserRole @default(USER)
|
||||
|
||||
company Company @relation("CompanyUsers", fields: [companyId], references: [id], onDelete: Cascade)
|
||||
id String @id @default(uuid())
|
||||
email String @unique
|
||||
password String // hashed, use bcrypt
|
||||
company Company @relation(fields: [companyId], references: [id])
|
||||
companyId String
|
||||
|
||||
resetToken String?
|
||||
role String // 'admin' | 'user' | 'auditor'
|
||||
resetToken String?
|
||||
resetTokenExpiry DateTime?
|
||||
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
}
|
||||
|
||||
/**
|
||||
* SESSION ↔ SESSIONIMPORT (1-to-1)
|
||||
*/
|
||||
|
||||
/**
|
||||
* 1. Normalised session ---------------------------
|
||||
*/
|
||||
model Session {
|
||||
id String @id @default(uuid())
|
||||
company Company @relation(fields: [companyId], references: [id], onDelete: Cascade)
|
||||
companyId String
|
||||
|
||||
/**
|
||||
* 1-to-1 link back to the import row
|
||||
*/
|
||||
import SessionImport? @relation("ImportToSession", fields: [importId], references: [id])
|
||||
importId String? @unique
|
||||
|
||||
/**
|
||||
* session-level data (processed from SessionImport)
|
||||
*/
|
||||
startTime DateTime
|
||||
endTime DateTime
|
||||
|
||||
// Direct copies from SessionImport (minimal processing)
|
||||
ipAddress String?
|
||||
country String? // from countryCode
|
||||
fullTranscriptUrl String?
|
||||
avgResponseTime Float? // from avgResponseTimeSeconds
|
||||
initialMsg String? // from initialMessage
|
||||
|
||||
// AI-processed fields (calculated from Messages or AI analysis)
|
||||
language String? // AI-detected from Messages
|
||||
messagesSent Int? // Calculated from Message count
|
||||
sentiment SentimentCategory? // AI-analyzed (changed from Float to enum)
|
||||
escalated Boolean? // AI-detected
|
||||
forwardedHr Boolean? // AI-detected
|
||||
category SessionCategory? // AI-categorized (changed to enum)
|
||||
|
||||
// AI-generated fields
|
||||
summary String? // AI-generated summary
|
||||
|
||||
/**
|
||||
* Relationships
|
||||
*/
|
||||
messages Message[] // Individual conversation messages
|
||||
sessionQuestions SessionQuestion[] // Questions asked in this session
|
||||
aiProcessingRequests AIProcessingRequest[] // AI processing cost tracking
|
||||
processingStatus SessionProcessingStatus[] // Processing pipeline status
|
||||
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
|
||||
@@index([companyId, startTime])
|
||||
}
|
||||
|
||||
/**
|
||||
* 2. Raw CSV row (pure data storage) ----------
|
||||
*/
|
||||
model SessionImport {
|
||||
id String @id @default(uuid())
|
||||
company Company @relation(fields: [companyId], references: [id], onDelete: Cascade)
|
||||
companyId String
|
||||
|
||||
/**
|
||||
* 1-to-1 back-relation; NO fields/references here
|
||||
*/
|
||||
session Session? @relation("ImportToSession")
|
||||
|
||||
// ─── 16 CSV columns 1-to-1 ────────────────────────
|
||||
externalSessionId String @unique // value from CSV column 1
|
||||
startTimeRaw String
|
||||
endTimeRaw String
|
||||
ipAddress String?
|
||||
countryCode String?
|
||||
language String?
|
||||
messagesSent Int?
|
||||
sentimentRaw String?
|
||||
escalatedRaw String?
|
||||
forwardedHrRaw String?
|
||||
fullTranscriptUrl String?
|
||||
avgResponseTimeSeconds Float?
|
||||
tokens Int?
|
||||
tokensEur Float?
|
||||
category String?
|
||||
initialMessage String?
|
||||
|
||||
// ─── Raw transcript content ─────────────────────────
|
||||
rawTranscriptContent String? // Fetched content from fullTranscriptUrl
|
||||
|
||||
// ─── bookkeeping ─────────────────────────────────
|
||||
createdAt DateTime @default(now())
|
||||
|
||||
@@unique([companyId, externalSessionId]) // idempotent re-imports
|
||||
}
|
||||
|
||||
/**
|
||||
* MESSAGE (individual lines)
|
||||
*/
|
||||
model Message {
|
||||
id String @id @default(uuid())
|
||||
|
||||
session Session @relation(fields: [sessionId], references: [id], onDelete: Cascade)
|
||||
sessionId String
|
||||
|
||||
timestamp DateTime?
|
||||
role String // "user" | "assistant" | "system" – free-form keeps migration easy
|
||||
content String
|
||||
order Int
|
||||
|
||||
createdAt DateTime @default(now())
|
||||
|
||||
@@unique([sessionId, order]) // guards against duplicate order values
|
||||
@@index([sessionId, order])
|
||||
}
|
||||
|
||||
/**
|
||||
* UNIFIED PROCESSING STATUS TRACKING
|
||||
*/
|
||||
model SessionProcessingStatus {
|
||||
id String @id @default(uuid())
|
||||
sessionId String
|
||||
stage ProcessingStage
|
||||
status ProcessingStatus @default(PENDING)
|
||||
|
||||
startedAt DateTime?
|
||||
completedAt DateTime?
|
||||
errorMessage String?
|
||||
retryCount Int @default(0)
|
||||
|
||||
// Stage-specific metadata (e.g., AI costs, token usage, fetch details)
|
||||
metadata Json?
|
||||
|
||||
session Session @relation(fields: [sessionId], references: [id], onDelete: Cascade)
|
||||
|
||||
@@unique([sessionId, stage])
|
||||
@@index([stage, status])
|
||||
@@index([sessionId])
|
||||
}
|
||||
|
||||
/**
|
||||
* QUESTION MANAGEMENT (separate from Session for better analytics)
|
||||
*/
|
||||
model Question {
|
||||
id String @id @default(uuid())
|
||||
content String @unique // The actual question text
|
||||
createdAt DateTime @default(now())
|
||||
|
||||
// Relationships
|
||||
sessionQuestions SessionQuestion[]
|
||||
}
|
||||
|
||||
model SessionQuestion {
|
||||
id String @id @default(uuid())
|
||||
sessionId String
|
||||
questionId String
|
||||
order Int // Order within the session
|
||||
createdAt DateTime @default(now())
|
||||
|
||||
// Relationships
|
||||
session Session @relation(fields: [sessionId], references: [id], onDelete: Cascade)
|
||||
question Question @relation(fields: [questionId], references: [id])
|
||||
|
||||
@@unique([sessionId, questionId]) // Prevent duplicate questions per session
|
||||
@@unique([sessionId, order]) // Ensure unique ordering
|
||||
@@index([sessionId])
|
||||
}
|
||||
|
||||
/**
|
||||
* AI PROCESSING COST TRACKING
|
||||
*/
|
||||
model AIProcessingRequest {
|
||||
id String @id @default(uuid())
|
||||
sessionId String
|
||||
|
||||
// OpenAI Request Details
|
||||
openaiRequestId String? // "chatcmpl-Bn8IH9UM8t7luZVWnwZG7CVJ0kjPo"
|
||||
model String // "gpt-4o-2024-08-06"
|
||||
serviceTier String? // "default"
|
||||
systemFingerprint String? // "fp_07871e2ad8"
|
||||
|
||||
// Token Usage (from usage object)
|
||||
promptTokens Int // 11
|
||||
completionTokens Int // 9
|
||||
totalTokens Int // 20
|
||||
|
||||
// Detailed Token Breakdown
|
||||
cachedTokens Int? // prompt_tokens_details.cached_tokens
|
||||
audioTokensPrompt Int? // prompt_tokens_details.audio_tokens
|
||||
reasoningTokens Int? // completion_tokens_details.reasoning_tokens
|
||||
audioTokensCompletion Int? // completion_tokens_details.audio_tokens
|
||||
acceptedPredictionTokens Int? // completion_tokens_details.accepted_prediction_tokens
|
||||
rejectedPredictionTokens Int? // completion_tokens_details.rejected_prediction_tokens
|
||||
|
||||
// Cost Calculation
|
||||
promptTokenCost Float // Cost per prompt token (varies by model)
|
||||
completionTokenCost Float // Cost per completion token (varies by model)
|
||||
totalCostEur Float // Calculated total cost in EUR
|
||||
|
||||
// Processing Context
|
||||
processingType String // "session_analysis", "reprocessing", etc.
|
||||
success Boolean // Whether the request succeeded
|
||||
errorMessage String? // If failed, what went wrong
|
||||
|
||||
// Timestamps
|
||||
requestedAt DateTime @default(now())
|
||||
completedAt DateTime?
|
||||
|
||||
// Relationships
|
||||
session Session @relation(fields: [sessionId], references: [id], onDelete: Cascade)
|
||||
|
||||
@@index([sessionId])
|
||||
@@index([requestedAt])
|
||||
@@index([model])
|
||||
}
|
||||
|
||||
/**
|
||||
* AI MODEL MANAGEMENT SYSTEM
|
||||
*/
|
||||
|
||||
/**
|
||||
* AI Model definitions (without pricing)
|
||||
*/
|
||||
model AIModel {
|
||||
id String @id @default(uuid())
|
||||
name String @unique // "gpt-4o", "gpt-4-turbo", etc.
|
||||
provider String // "openai", "anthropic", etc.
|
||||
maxTokens Int? // Maximum tokens for this model
|
||||
isActive Boolean @default(true)
|
||||
|
||||
// Relationships
|
||||
pricing AIModelPricing[]
|
||||
companyModels CompanyAIModel[]
|
||||
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
|
||||
@@index([provider, isActive])
|
||||
}
|
||||
|
||||
/**
|
||||
* Time-based pricing for AI models
|
||||
*/
|
||||
model AIModelPricing {
|
||||
id String @id @default(uuid())
|
||||
aiModelId String
|
||||
promptTokenCost Float // Cost per prompt token in USD
|
||||
completionTokenCost Float // Cost per completion token in USD
|
||||
effectiveFrom DateTime // When this pricing becomes effective
|
||||
effectiveUntil DateTime? // When this pricing expires (null = current)
|
||||
|
||||
// Relationships
|
||||
aiModel AIModel @relation(fields: [aiModelId], references: [id], onDelete: Cascade)
|
||||
|
||||
createdAt DateTime @default(now())
|
||||
|
||||
@@index([aiModelId, effectiveFrom])
|
||||
@@index([effectiveFrom, effectiveUntil])
|
||||
}
|
||||
|
||||
/**
|
||||
* Company-specific AI model assignments
|
||||
*/
|
||||
model CompanyAIModel {
|
||||
id String @id @default(uuid())
|
||||
companyId String
|
||||
aiModelId String
|
||||
isDefault Boolean @default(false) // Is this the default model for the company?
|
||||
|
||||
// Relationships
|
||||
company Company @relation(fields: [companyId], references: [id], onDelete: Cascade)
|
||||
aiModel AIModel @relation(fields: [aiModelId], references: [id], onDelete: Cascade)
|
||||
|
||||
createdAt DateTime @default(now())
|
||||
|
||||
@@unique([companyId, aiModelId]) // Prevent duplicate assignments
|
||||
@@index([companyId, isDefault])
|
||||
id String @id
|
||||
company Company @relation(fields: [companyId], references: [id])
|
||||
companyId String
|
||||
startTime DateTime
|
||||
endTime DateTime
|
||||
ipAddress String?
|
||||
country String?
|
||||
language String?
|
||||
messagesSent Int?
|
||||
sentiment Float?
|
||||
escalated Boolean?
|
||||
forwardedHr Boolean?
|
||||
fullTranscriptUrl String?
|
||||
transcriptContent String? // Added to store the fetched transcript
|
||||
avgResponseTime Float?
|
||||
tokens Int?
|
||||
tokensEur Float?
|
||||
category String?
|
||||
initialMsg String?
|
||||
createdAt DateTime @default(now())
|
||||
}
|
||||
|
||||
39
prisma/seed.js
Normal file
39
prisma/seed.js
Normal file
@ -0,0 +1,39 @@
|
||||
// seed.js - Create initial admin user and company
|
||||
import { PrismaClient } from "@prisma/client";
|
||||
import bcrypt from "bcryptjs";
|
||||
|
||||
const prisma = new PrismaClient();
|
||||
|
||||
async function main() {
|
||||
// Create a company
|
||||
const company = await prisma.company.create({
|
||||
data: {
|
||||
name: "Demo Company",
|
||||
csvUrl: "https://example.com/data.csv", // Replace with a real URL if available
|
||||
},
|
||||
});
|
||||
|
||||
// Create an admin user
|
||||
const hashedPassword = await bcrypt.hash("admin123", 10);
|
||||
await prisma.user.create({
|
||||
data: {
|
||||
email: "admin@demo.com",
|
||||
password: hashedPassword,
|
||||
role: "admin",
|
||||
companyId: company.id,
|
||||
},
|
||||
});
|
||||
|
||||
console.log("Seed data created successfully:");
|
||||
console.log("Company: Demo Company");
|
||||
console.log("Admin user: admin@demo.com (password: admin123)");
|
||||
}
|
||||
|
||||
main()
|
||||
.catch((e) => {
|
||||
console.error("Error seeding database:", e);
|
||||
process.exit(1);
|
||||
})
|
||||
.finally(async () => {
|
||||
await prisma.$disconnect();
|
||||
});
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user