5 Commits

Author SHA1 Message Date
adea8ae6b7 Refactor error payload logic 2025-06-10 00:27:54 +02:00
ef8601dd72 chore: secure error response 2025-06-10 00:17:24 +02:00
5aaca6de99 Potential fix for code scanning alert no. 2: Workflow does not contain permissions (#6)
Co-authored-by: Copilot Autofix powered by AI <62310815+github-advanced-security[bot]@users.noreply.github.com>
2025-06-10 00:11:10 +02:00
71c8aff125 Implement Cloudflare D1 support with Prisma, update scripts, and enhance documentation 2025-06-01 05:22:44 +02:00
0c18e8be57 Add initial wrangler configuration for livedash-node project
- Created wrangler.json with project metadata and settings
- Configured D1 database binding for database interaction
- Enabled observability for monitoring
- Added placeholders for smart placement, environment variables, static assets, and service bindings
2025-06-01 04:51:57 +02:00
32 changed files with 15356 additions and 9728 deletions
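The last commit above adds a `wrangler.json` that is not itself shown in this diff. Based on the commit notes and the database details documented in `docs/D1_CLI_ACCESS.md` below, it presumably looks roughly like the following; the project `name` and `compatibility_date` are guesses, while the binding name, database name, and database ID are taken from this changeset:

```json
{
  "name": "livedash-node",
  "compatibility_date": "2025-06-01",
  "observability": { "enabled": true },
  "d1_databases": [
    {
      "binding": "DB",
      "database_name": "d1-notso-livedash",
      "database_id": "d4ee7efe-d37a-48e4-bed7-fdfaa5108131"
    }
  ]
}
```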


@@ -1,4 +1,6 @@
name: Playwright Tests
permissions:
  contents: read
on:
  push:
    branches: [main, master]

.gitignore (vendored, 186 lines changed)

@@ -261,3 +261,189 @@ Thumbs.db
/playwright-report/
/blob-report/
/playwright/.cache/
# Created by https://www.toptal.com/developers/gitignore/api/node,macos
# Edit at https://www.toptal.com/developers/gitignore?templates=node,macos
### macOS ###
# General
.DS_Store
.AppleDouble
.LSOverride
# Icon must end with two \r
Icon
# Thumbnails
._*
# Files that might appear in the root of a volume
.DocumentRevisions-V100
.fseventsd
.Spotlight-V100
.TemporaryItems
.Trashes
.VolumeIcon.icns
.com.apple.timemachine.donotpresent
# Directories potentially created on remote AFP share
.AppleDB
.AppleDesktop
Network Trash Folder
Temporary Items
.apdisk
### Node ###
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*
.pnpm-debug.log*
# Diagnostic reports (https://nodejs.org/api/report.html)
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
# Runtime data
pids
*.pid
*.seed
*.pid.lock
# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov
# Coverage directory used by tools like istanbul
coverage
*.lcov
# nyc test coverage
.nyc_output
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
.grunt
# Bower dependency directory (https://bower.io/)
bower_components
# node-waf configuration
.lock-wscript
# Compiled binary addons (https://nodejs.org/api/addons.html)
build/Release
# Dependency directories
node_modules/
jspm_packages/
# Moved from ./templates for ignoring all locks in templates
templates/**/*-lock.*
templates/**/*.lock
# Snowpack dependency directory (https://snowpack.dev/)
web_modules/
# TypeScript cache
*.tsbuildinfo
# Optional npm cache directory
.npm
# Optional eslint cache
.eslintcache
# Optional stylelint cache
.stylelintcache
# Microbundle cache
.rpt2_cache/
.rts2_cache_cjs/
.rts2_cache_es/
.rts2_cache_umd/
# Optional REPL history
.node_repl_history
# Output of 'npm pack'
*.tgz
# Yarn Integrity file
.yarn-integrity
# dotenv environment variable files
.env
.env.development.local
.env.test.local
.env.production.local
.env.local
# parcel-bundler cache (https://parceljs.org/)
.cache
.parcel-cache
# Next.js build output
.next
out
# Nuxt.js build / generate output
.nuxt
dist
# Gatsby files
.cache/
# Comment in the public line in if your project uses Gatsby and not Next.js
# https://nextjs.org/blog/next-9-1#public-directory-support
# public
# vuepress build output
.vuepress/dist
# vuepress v2.x temp and cache directory
.temp
# Docusaurus cache and generated files
.docusaurus
# Serverless directories
.serverless/
# FuseBox cache
.fusebox/
# DynamoDB Local files
.dynamodb/
# TernJS port file
.tern-port
# Stores VSCode versions used for testing VSCode extensions
.vscode-test
# yarn v2
.yarn/cache
.yarn/unplugged
.yarn/build-state.yml
.yarn/install-state.gz
.pnp.*
### Node Patch ###
# Serverless Webpack directories
.webpack/
# Optional stylelint cache
# SvelteKit build / generate output
.svelte-kit
# End of https://www.toptal.com/developers/gitignore/api/node,macos
# Wrangler output
.wrangler/
build/
# Turbo output
.turbo/
.dev.vars*
test-transcript-format.js

.prettierignore (new file, 54 lines)

@@ -0,0 +1,54 @@
# Dependencies
node_modules/
.pnpm-store/
# Build outputs
.next/
out/
dist/
build/
# Environment files
.env
.env.local
.env.development.local
.env.test.local
.env.production.local
# Logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
pnpm-debug.log*
# Database
*.db
*.sqlite
prisma/migrations/
# IDE
.vscode/
.idea/
*.swp
*.swo
# OS
.DS_Store
Thumbs.db
# Git
.git/
# Coverage reports
coverage/
# Playwright
test-results/
playwright-report/
playwright/.cache/
# Generated files
*.generated.*
pnpm-lock.yaml

README.md (106 lines changed)

@@ -2,65 +2,65 @@

A real-time analytics dashboard for monitoring user sessions and interactions with interactive data visualizations and detailed metrics.

![Next.js](<https://img.shields.io/badge/dynamic/regex?url=https%3A%2F%2Fraw.githubusercontent.com%2Fkjanat%2Flivedash-node%2Fmaster%2Fpackage.json&search=%22next%22%5Cs*%3A%5Cs*%22%5C%5E(%3F%3Cversion%3E%5Cd%2B%5C.%5Cd*).*%22&replace=%24%3Cversion%3E&logo=nextdotjs&label=Nextjs&color=%23000000>)
![React](<https://img.shields.io/badge/dynamic/regex?url=https%3A%2F%2Fraw.githubusercontent.com%2Fkjanat%2Flivedash-node%2Fmaster%2Fpackage.json&search=%22react%22%5Cs*%3A%5Cs*%22%5C%5E(%3F%3Cversion%3E%5Cd%2B%5C.%5Cd*).*%22&replace=%24%3Cversion%3E&logo=react&label=React&color=%2361DAFB>)
![TypeScript](<https://img.shields.io/badge/dynamic/regex?url=https%3A%2F%2Fraw.githubusercontent.com%2Fkjanat%2Flivedash-node%2Fmaster%2Fpackage.json&search=%22typescript%22%5Cs*%3A%5Cs*%22%5C%5E(%3F%3Cversion%3E%5Cd%2B%5C.%5Cd*).*%22&replace=%24%3Cversion%3E&logo=typescript&label=TypeScript&color=%233178C6>)
![Prisma](<https://img.shields.io/badge/dynamic/regex?url=https%3A%2F%2Fraw.githubusercontent.com%2Fkjanat%2Flivedash-node%2Fmaster%2Fpackage.json&search=%22prisma%22%5Cs*%3A%5Cs*%22%5C%5E(%3F%3Cversion%3E%5Cd%2B%5C.%5Cd*).*%22&replace=%24%3Cversion%3E&logo=prisma&label=Prisma&color=%232D3748>)
![TailwindCSS](<https://img.shields.io/badge/dynamic/regex?url=https%3A%2F%2Fraw.githubusercontent.com%2Fkjanat%2Flivedash-node%2Fmaster%2Fpackage.json&search=%22tailwindcss%22%5Cs*%3A%5Cs*%22%5C%5E(%3F%3Cversion%3E%5Cd%2B%5C.%5Cd*).*%22&replace=%24%3Cversion%3E&logo=tailwindcss&label=TailwindCSS&color=%2306B6D4>)

## Features

- **Real-time Session Monitoring**: Track and analyze user sessions as they happen
- **Interactive Visualizations**: Geographic maps, response time distributions, and more
- **Advanced Analytics**: Detailed metrics and insights about user behavior
- **User Management**: Secure authentication with role-based access control
- **Customizable Dashboard**: Filter and sort data based on your specific needs
- **Session Details**: In-depth analysis of individual user sessions

## Tech Stack

- **Frontend**: React 19, Next.js 15, TailwindCSS 4
- **Backend**: Next.js API Routes, Node.js
- **Database**: Prisma ORM with SQLite (default), compatible with PostgreSQL
- **Authentication**: NextAuth.js
- **Visualization**: Chart.js, D3.js, React Leaflet
- **Data Processing**: Node-cron for scheduled tasks

## Getting Started

### Prerequisites

- Node.js (LTS version recommended)
- npm or yarn

### Installation

1. Clone this repository:

   ```bash
   git clone https://github.com/kjanat/livedash-node.git
   cd livedash-node
   ```

2. Install dependencies:

   ```bash
   npm install
   ```

3. Set up the database:

   ```bash
   npm run prisma:generate
   npm run prisma:migrate
   npm run prisma:seed
   ```

4. Start the development server:

   ```bash
   npm run dev
   ```

5. Open your browser and navigate to <http://localhost:3000>

@@ -76,22 +76,22 @@ NEXTAUTH_SECRET=your-secret-here

## Project Structure

- `app/`: Next.js App Router components and pages
- `components/`: Reusable React components
- `lib/`: Utility functions and shared code
- `pages/`: API routes and server-side code
- `prisma/`: Database schema and migrations
- `public/`: Static assets
- `docs/`: Project documentation

## Available Scripts

- `npm run dev`: Start the development server
- `npm run build`: Build the application for production
- `npm run start`: Run the production build
- `npm run lint`: Run ESLint
- `npm run format`: Format code with Prettier
- `npm run prisma:studio`: Open Prisma Studio to view database

## Contributing

@@ -107,9 +107,9 @@ This project is not licensed for commercial use without explicit permission. Fre

## Acknowledgments

- [Next.js](https://nextjs.org/)
- [Prisma](https://prisma.io/)
- [TailwindCSS](https://tailwindcss.com/)
- [Chart.js](https://www.chartjs.org/)
- [D3.js](https://d3js.org/)
- [React Leaflet](https://react-leaflet.js.org/)

TODO.md (130 lines changed)

@@ -2,95 +2,107 @@

## Dashboard Integration

- [ ] **Resolve `GeographicMap.tsx` and `ResponseTimeDistribution.tsx` data simulation**
  - Investigate integrating real data sources with server-side analytics
  - Replace simulated data mentioned in `docs/dashboard-components.md`

## Component Specific

- [ ] **Implement robust emailing of temporary passwords** (a sketch follows this section)
  - File: `pages/api/dashboard/users.ts`
  - Set up proper email service integration
- [x] **Session page improvements**
  - File: `app/dashboard/sessions/page.tsx`
  - Implemented pagination, advanced filtering, and sorting
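A minimal sketch of what the emailing task could look like once a real provider is wired into `lib/sendEmail.ts`. Nodemailer is only one of the providers listed later in this file and is not in `package.json` yet, so everything below is an assumption:

```typescript
// lib/sendEmail.ts - hypothetical Nodemailer-based variant (sketch only).
// Assumes SMTP_HOST, SMTP_USER, and SMTP_PASS exist in the environment.
import nodemailer from "nodemailer";

const transporter = nodemailer.createTransport({
  host: process.env.SMTP_HOST,
  port: 587,
  auth: { user: process.env.SMTP_USER, pass: process.env.SMTP_PASS },
});

export async function sendEmail(to: string, subject: string, text: string) {
  // Replaces the current console logging with an actual SMTP delivery.
  await transporter.sendMail({ from: process.env.SMTP_USER, to, subject, text });
}
```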
## File Cleanup

- [x] **Remove backup files**
  - Reviewed and removed `.bak` and `.new` files after integration
  - Cleaned up `GeographicMap.tsx.bak`, `SessionDetails.tsx.bak`, `SessionDetails.tsx.new`
## Database Schema Improvements

- [ ] **Update EndTime field**
  - Make `endTime` field nullable in Prisma schema to match TypeScript interfaces
- [ ] **Add database indices** (a schema sketch follows this section)
  - Add appropriate indices to improve query performance
  - Focus on dashboard metrics and session listing queries
- [ ] **Implement production email service**
  - Replace console logging in `lib/sendEmail.ts`
  - Consider providers: Nodemailer, SendGrid, AWS SES
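A sketch of how the index item could be expressed in `prisma/schema.prisma`; which columns actually benefit would need to be confirmed against the dashboard and session-listing queries:

```prisma
model Session {
  // ...existing fields from the current schema...

  // Hypothetical indices for session listing and dashboard metrics
  @@index([companyId, startTime])
  @@index([companyId, category])
}
```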
## General Enhancements & Features

- [ ] **Real-time updates** (an SSE sketch follows this section)
  - Implement for dashboard and session list
  - Consider WebSockets or Server-Sent Events
- [ ] **Data export functionality**
  - Allow users (especially admins) to export session data
  - Support CSV format initially
- [ ] **Customizable dashboard**
  - Allow users to customize dashboard view
  - Let users choose which metrics/charts are most important
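For the real-time updates item, Server-Sent Events is the lighter of the two options mentioned above. A rough sketch as a Next.js App Router route handler; the route path and the fixed 15-second interval are illustrative only:

```typescript
// app/api/dashboard/stream/route.ts - hypothetical SSE endpoint (sketch only).
// A real version would emit only when the scheduler imports new sessions.
export async function GET() {
  const encoder = new TextEncoder();
  let timer: ReturnType<typeof setInterval> | undefined;

  const stream = new ReadableStream({
    start(controller) {
      timer = setInterval(() => {
        controller.enqueue(encoder.encode("event: metrics-updated\ndata: {}\n\n"));
      }, 15_000);
    },
    cancel() {
      // Stop pushing events once the client disconnects.
      if (timer) clearInterval(timer);
    },
  });

  return new Response(stream, {
    headers: {
      "Content-Type": "text/event-stream",
      "Cache-Control": "no-cache",
      Connection: "keep-alive",
    },
  });
}
```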
## Testing & Quality Assurance

- [ ] **Comprehensive testing suite**
  - [ ] Unit tests for utility functions and API logic
  - [ ] Integration tests for API endpoints with database
  - [ ] End-to-end tests for user flows (Playwright or Cypress)
- [ ] **Error monitoring and logging**
  - Integrate robust error monitoring service (Sentry)
  - Enhance server-side logging
- [ ] **Accessibility improvements**
  - Review application against WCAG guidelines
  - Improve keyboard navigation and screen reader compatibility
  - Check color contrast ratios
## Security Enhancements

- [x] **Password reset functionality**
  - Implemented secure password reset mechanism
  - Files: `app/forgot-password/page.tsx`, `app/reset-password/page.tsx`, `pages/api/forgot-password.ts`, `pages/api/reset-password.ts`
- [ ] **Two-Factor Authentication (2FA)**
  - Consider adding 2FA, especially for admin accounts
- [ ] **Input validation and sanitization** (a validation sketch follows this section)
  - Review all user inputs (API request bodies, query parameters)
  - Ensure proper validation and sanitization
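A dependency-free sketch of the kind of guard each API route could run before touching the database; the shape mirrors the invite-user flow elsewhere in this changeset, and the allowed role values are an assumption:

```typescript
// Hypothetical request-body validator for the invite-user endpoint (sketch only).
interface InviteUserBody {
  email: string;
  role: "admin" | "user";
}

export function parseInviteUserBody(body: unknown): InviteUserBody | null {
  if (typeof body !== "object" || body === null) return null;
  const { email, role } = body as Record<string, unknown>;

  // Reject anything that is not a plausible email address.
  if (typeof email !== "string" || !/^[^\s@]+@[^\s@]+\.[^\s@]+$/.test(email)) return null;
  // Only accept known roles.
  if (role !== "admin" && role !== "user") return null;

  return { email: email.trim().toLowerCase(), role };
}
```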
## Code Quality & Development

- [ ] **Code review process**
  - Enforce code reviews for all changes
- [ ] **Environment configuration**
  - Ensure secure management of environment-specific configurations
- [ ] **Dependency management**
  - Periodically review dependencies for vulnerabilities
  - Keep dependencies updated
- [ ] **Documentation updates**
  - [ ] Ensure `docs/dashboard-components.md` reflects actual implementations
  - [ ] Verify "Dashboard Enhancements" are consistently applied
  - [ ] Update documentation for improved layout and visual hierarchies


@@ -4,6 +4,10 @@ import { useState, useEffect } from "react";
import { useSession } from "next-auth/react";
import { Company } from "../../../lib/types";

interface CompanyConfigResponse {
  company: Company;
}

export default function CompanySettingsPage() {
  const { data: session, status } = useSession();
  // We store the full company object for future use and updates after save operations

@@ -22,7 +26,7 @@ export default function CompanySettingsPage() {
    setLoading(true);
    try {
      const res = await fetch("/api/dashboard/config");
      const data = (await res.json()) as CompanyConfigResponse;
      setCompany(data.company);
      setCsvUrl(data.company.csvUrl || "");
      setCsvUsername(data.company.csvUsername || "");

@@ -58,10 +62,10 @@ export default function CompanySettingsPage() {
      if (res.ok) {
        setMessage("Settings saved successfully!");
        // Update local state if needed
        const data = (await res.json()) as CompanyConfigResponse;
        setCompany(data.company);
      } else {
        const error = (await res.json()) as { message?: string; };
        setMessage(
          `Failed to save settings: ${error.message || "Unknown error"}`
        );


@@ -17,6 +17,11 @@ import GeographicMap from "../../../components/GeographicMap";
import ResponseTimeDistribution from "../../../components/ResponseTimeDistribution";
import WelcomeBanner from "../../../components/WelcomeBanner";

interface MetricsApiResponse {
  metrics: MetricsResult;
  company: Company;
}

// Safely wrapped component with useSession
function DashboardContent() {
  const { data: session, status } = useSession(); // Add status from useSession

@@ -40,7 +45,7 @@ function DashboardContent() {
  const fetchData = async () => {
    setLoading(true);
    const res = await fetch("/api/dashboard/metrics");
    const data = (await res.json()) as MetricsApiResponse;
    console.log("Metrics from API:", {
      avgSessionLength: data.metrics.avgSessionLength,
      avgSessionTimeTrend: data.metrics.avgSessionTimeTrend,

@@ -76,10 +81,10 @@ function DashboardContent() {
      if (res.ok) {
        // Refetch metrics
        const metricsRes = await fetch("/api/dashboard/metrics");
        const data = (await metricsRes.json()) as MetricsApiResponse;
        setMetrics(data.metrics);
      } else {
        const errorData = (await res.json()) as { error: string; };
        alert(`Failed to refresh sessions: ${errorData.error}`);
      }
    } finally {


@@ -8,6 +8,10 @@ import TranscriptViewer from "../../../../components/TranscriptViewer";
import { ChatSession } from "../../../../lib/types";
import Link from "next/link";

interface SessionApiResponse {
  session: ChatSession;
}

export default function SessionViewPage() {
  const params = useParams();
  const router = useRouter(); // Initialize useRouter

@@ -30,13 +34,13 @@ export default function SessionViewPage() {
      try {
        const response = await fetch(`/api/dashboard/session/${id}`);
        if (!response.ok) {
          const errorData = (await response.json()) as { error: string; };
          throw new Error(
            errorData.error ||
              `Failed to fetch session: ${response.statusText}`
          );
        }
        const data = (await response.json()) as SessionApiResponse;
        setSession(data.session);
      } catch (err) {
        setError(

@@ -150,16 +154,17 @@ export default function SessionViewPage() {
              <p className="text-gray-600">
                No transcript content available for this session.
              </p>
              {session.fullTranscriptUrl &&
                process.env.NODE_ENV !== "production" && (
                  <a
                    href={session.fullTranscriptUrl}
                    target="_blank"
                    rel="noopener noreferrer"
                    className="text-sky-600 hover:underline mt-2 inline-block"
                  >
                    View Source Transcript URL
                  </a>
                )}
            </div>
          )}
        </div>


@@ -14,6 +14,11 @@ interface FilterOptions {
  languages: string[];
}

interface SessionsApiResponse {
  sessions: ChatSession[];
  totalSessions: number;
}

export default function SessionsPage() {
  const [sessions, setSessions] = useState<ChatSession[]>([]);
  const [loading, setLoading] = useState(true);

@@ -58,7 +63,7 @@ export default function SessionsPage() {
      if (!response.ok) {
        throw new Error("Failed to fetch filter options");
      }
      const data = (await response.json()) as FilterOptions;
      setFilterOptions(data);
    } catch (err) {
      setError(

@@ -88,7 +93,7 @@ export default function SessionsPage() {
      if (!response.ok) {
        throw new Error(`Failed to fetch sessions: ${response.statusText}`);
      }
      const data = (await response.json()) as SessionsApiResponse;
      setSessions(data.sessions || []);
      setTotalPages(Math.ceil((data.totalSessions || 0) / pageSize));
    } catch (err) {


@@ -12,6 +12,10 @@ interface UserManagementProps {
  session: UserSession;
}

interface UsersApiResponse {
  users: UserItem[];
}

export default function UserManagement({ session }: UserManagementProps) {
  const [users, setUsers] = useState<UserItem[]>([]);
  const [email, setEmail] = useState<string>("");

@@ -21,7 +25,7 @@ export default function UserManagement({ session }: UserManagementProps) {
  useEffect(() => {
    fetch("/api/dashboard/users")
      .then((r) => r.json())
      .then((data) => setUsers((data as UsersApiResponse).users));
  }, []);

  async function inviteUser() {


@@ -9,6 +9,10 @@ interface UserItem {
  role: string;
}

interface UsersApiResponse {
  users: UserItem[];
}

export default function UserManagementPage() {
  const { data: session, status } = useSession();
  const [users, setUsers] = useState<UserItem[]>([]);

@@ -27,7 +31,7 @@ export default function UserManagementPage() {
    setLoading(true);
    try {
      const res = await fetch("/api/dashboard/users");
      const data = (await res.json()) as UsersApiResponse;
      setUsers(data.users);
    } catch (error) {
      console.error("Failed to fetch users:", error);

@@ -52,7 +56,7 @@ export default function UserManagementPage() {
        // Refresh the user list
        fetchUsers();
      } else {
        const error = (await res.json()) as { message?: string; };
        setMessage(
          `Failed to invite user: ${error.message || "Unknown error"}`
        );


@@ -146,7 +146,8 @@ export default function SessionDetails({ session }: SessionDetailsProps) {
        {/* Fallback to link only if we only have the URL but no content - this might also be redundant if parent handles all transcript display */}
        {(!session.transcriptContent ||
          session.transcriptContent.length === 0) &&
          session.fullTranscriptUrl &&
          process.env.NODE_ENV !== "production" && (
            <div className="flex justify-between pt-2">
              <span className="text-gray-600">Transcript:</span>
              <a


@@ -2,7 +2,7 @@

import { useState } from "react";
import ReactMarkdown from "react-markdown";
import rehypeRaw from "rehype-raw";

interface TranscriptViewerProps {
  transcriptContent: string;

@@ -23,6 +23,7 @@ function formatTranscript(content: string): React.ReactNode[] {
  const elements: React.ReactNode[] = [];
  let currentSpeaker: string | null = null;
  let currentMessages: string[] = [];
  let currentTimestamp: string | null = null;

  // Process each line
  lines.forEach((line) => {

@@ -32,8 +33,15 @@
      return;
    }

    // Check if this is a new speaker line with or without datetime
    // Format 1: [29.05.2025 21:26:44] User: message
    // Format 2: User: message
    const datetimeMatch = line.match(
      /^\[([^\]]+)\]\s*(User|Assistant):\s*(.*)$/
    );
    const simpleMatch = line.match(/^(User|Assistant):\s*(.*)$/);

    if (datetimeMatch || simpleMatch) {
      // If we have accumulated messages for a previous speaker, add them
      if (currentSpeaker && currentMessages.length > 0) {
        elements.push(

@@ -48,6 +56,11 @@
                : "bg-gray-100 text-gray-800"
            }`}
          >
            {currentTimestamp && (
              <div className="text-xs opacity-60 mb-1">
                {currentTimestamp}
              </div>
            )}
            {currentMessages.map((msg, i) => (
              // Use ReactMarkdown to render each message part
              <ReactMarkdown

@@ -73,12 +86,22 @@
        currentMessages = [];
      }

      if (datetimeMatch) {
        // Format with datetime: [29.05.2025 21:26:44] User: message
        currentTimestamp = datetimeMatch[1];
        currentSpeaker = datetimeMatch[2];
        const messageContent = datetimeMatch[3].trim();
        if (messageContent) {
          currentMessages.push(messageContent);
        }
      } else if (simpleMatch) {
        // Format without datetime: User: message
        currentTimestamp = null;
        currentSpeaker = simpleMatch[1];
        const messageContent = simpleMatch[2].trim();
        if (messageContent) {
          currentMessages.push(messageContent);
        }
      }
    } else if (currentSpeaker) {
      // This is a continuation of the current speaker's message

@@ -100,6 +123,9 @@
            : "bg-gray-100 text-gray-800"
        }`}
      >
        {currentTimestamp && (
          <div className="text-xs opacity-60 mb-1">{currentTimestamp}</div>
        )}
        {currentMessages.map((msg, i) => (
          // Use ReactMarkdown to render each message part
          <ReactMarkdown

@@ -138,6 +164,9 @@ export default function TranscriptViewer({
  const formattedElements = formatTranscript(transcriptContent);

  // Hide "View Full Raw" button in production environment
  const isProduction = process.env.NODE_ENV === "production";

  return (
    <div className="bg-white shadow-lg rounded-lg p-4 md:p-6 mt-6">
      <div className="flex justify-between items-center mb-4">

@@ -145,7 +174,7 @@
          Session Transcript
        </h2>
        <div className="flex items-center space-x-3">
          {transcriptUrl && !isProduction && (
            <a
              href={transcriptUrl}
              target="_blank"

docs/D1_CLI_ACCESS.md (new file, 227 lines)

@@ -0,0 +1,227 @@
# D1 Database Command Line Access
This guide shows you how to access and manage your Cloudflare D1 database `d1-notso-livedash` from the command line.
## Quick Reference
### Using the Custom D1 CLI Script
```bash
# Simple and fast commands
pnpm d1 tables # List all tables
pnpm d1 info # Database information
pnpm d1 schema User # Show table schema
pnpm d1 query "SELECT COUNT(*) FROM User" # Execute SQL
pnpm d1 export backup.sql # Export database
# Remote (production) commands
pnpm d1 --remote info # Production database info
pnpm d1 --remote query "SELECT * FROM Company LIMIT 5"
```
### Using Package.json Scripts
```bash
# Database information
pnpm d1:list # List all D1 databases
pnpm d1:info # Local database info
pnpm d1:info:remote # Remote database info
# Backup and export
pnpm d1:export # Export local database
pnpm d1:export:remote # Export remote database
pnpm d1:schema # Export schema only
```
### Direct Wrangler Commands
```bash
# Basic operations
npx wrangler d1 list
npx wrangler d1 info d1-notso-livedash
npx wrangler d1 execute d1-notso-livedash --command "SELECT * FROM User"
# Remote operations (add --remote flag)
npx wrangler d1 info d1-notso-livedash --remote
npx wrangler d1 execute d1-notso-livedash --remote --command "SELECT COUNT(*) FROM Company"
```
## Database Schema
Your D1 database contains these tables:
### Company Table
```sql
- id (TEXT, PRIMARY KEY)
- name (TEXT, NOT NULL)
- csvUrl (TEXT, NOT NULL)
- csvUsername (TEXT)
- csvPassword (TEXT)
- sentimentAlert (REAL)
- dashboardOpts (TEXT)
- createdAt (DATETIME, NOT NULL, DEFAULT CURRENT_TIMESTAMP)
- updatedAt (DATETIME, NOT NULL)
```
### User Table
```sql
- id (TEXT, PRIMARY KEY)
- email (TEXT, NOT NULL)
- password (TEXT, NOT NULL)
- companyId (TEXT, NOT NULL)
- role (TEXT, NOT NULL)
- resetToken (TEXT)
- resetTokenExpiry (DATETIME)
```
### Session Table
```sql
- id (TEXT, PRIMARY KEY)
- userId (TEXT, NOT NULL)
- expiresAt (DATETIME, NOT NULL)
```
## Common SQL Queries
### Data Exploration
```sql
-- Check table sizes
SELECT 'Company' as table_name, COUNT(*) as count FROM Company
UNION ALL
SELECT 'User' as table_name, COUNT(*) as count FROM User
UNION ALL
SELECT 'Session' as table_name, COUNT(*) as count FROM Session;
-- Show all table names
SELECT name FROM sqlite_master WHERE type='table' ORDER BY name;
-- Get table schema
PRAGMA table_info(User);
```
### Business Queries
```sql
-- List companies with user counts
SELECT c.name, c.id, COUNT(u.id) as user_count
FROM Company c
LEFT JOIN User u ON c.id = u.companyId
GROUP BY c.id, c.name;
-- Find admin users
SELECT u.email, c.name as company
FROM User u
JOIN Company c ON u.companyId = c.id
WHERE u.role = 'admin';
-- Active sessions
SELECT COUNT(*) as active_sessions
FROM Session
WHERE expiresAt > datetime('now');
```
## Local vs Remote Databases
- **Local Database**: Located at `.wrangler/state/v3/d1/` (for development)
- **Remote Database**: Cloudflare's production D1 database
### When to Use Each:
- **Local**: Development, testing, safe experimentation
- **Remote**: Production data, deployment verification
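The package.json scripts added in this changeset map onto that split; for example, applying the committed migrations to each side:

```bash
# Local development database
pnpm seedLocalD1   # wrangler d1 migrations apply DB --local

# Remote (production) database
pnpm predeploy     # wrangler d1 migrations apply DB --remote
```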
## Database Statistics
Current database info:
- **Database ID**: d4ee7efe-d37a-48e4-bed7-fdfaa5108131
- **Region**: WEUR (Western Europe)
- **Size**: ~53.2 kB
- **Tables**: 6 (including system tables)
- **Read Queries (24h)**: 65
- **Write Queries (24h)**: 8
## Scripts Available
### `/scripts/d1.js` (Recommended)
Simple, fast CLI for common operations:
```bash
node scripts/d1.js tables
node scripts/d1.js schema User
node scripts/d1.js query "SELECT * FROM Company"
node scripts/d1.js --remote info
```
### `/scripts/d1-query.js`
Simple query executor:
```bash
node scripts/d1-query.js "SELECT COUNT(*) FROM User"
node scripts/d1-query.js --remote "SELECT * FROM Company"
```
### `/scripts/d1-manager.js`
Comprehensive database management (if needed for advanced operations):
```bash
node scripts/d1-manager.js info
node scripts/d1-manager.js backup
```
## Backup and Recovery
### Create Backups
```bash
# Quick backup
pnpm d1 export backup_$(date +%Y%m%d).sql
# Automated backup with timestamp
npx wrangler d1 export d1-notso-livedash --output backups/backup_$(date +%Y%m%d_%H%M%S).sql
# Schema only backup
npx wrangler d1 export d1-notso-livedash --no-data --output schema.sql
```
### Restore from Backup
```bash
# Apply SQL file to database
npx wrangler d1 execute d1-notso-livedash --file backup.sql
```
## Troubleshooting
### Common Issues
1. **"wrangler not found"**: Use `npx wrangler` instead of `wrangler`
2. **Permission denied**: Ensure you're logged into Cloudflare: `npx wrangler login`
3. **Database not found**: Check `wrangler.json` for correct binding name
### Debug Commands
```bash
# Check Wrangler authentication
npx wrangler whoami
# Verify database configuration
npx wrangler d1 list
# Test database connectivity
npx wrangler d1 execute d1-notso-livedash --command "SELECT 1"
```
## Security Notes
- Local database is for development only
- Never expose production database credentials
- Use `--remote` flag carefully in production
- Regular backups are recommended for production data


@@ -1,5 +1,6 @@

// Prisma client setup with support for Cloudflare D1
import { PrismaClient } from "@prisma/client";
import { PrismaD1 } from "@prisma/adapter-d1";

// Add prisma to the NodeJS global type
// This approach avoids NodeJS.Global which is not available

@@ -9,12 +10,24 @@ declare const global: {
  prisma: PrismaClient | undefined;
};

// Check if we're running in Cloudflare Workers environment
const isCloudflareWorker = typeof globalThis.DB !== 'undefined';

// Initialize Prisma Client
let prisma: PrismaClient;

if (isCloudflareWorker) {
  // In Cloudflare Workers, use D1 adapter
  const adapter = new PrismaD1(globalThis.DB);
  prisma = new PrismaClient({ adapter });
} else {
  // In Next.js/Node.js, use regular SQLite
  prisma = global.prisma || new PrismaClient();

  // Save in global if we're in development
  if (process.env.NODE_ENV !== "production") {
    global.prisma = prisma;
  }
}

export { prisma };
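The branch above reads `globalThis.DB`, which TypeScript will not know about by default. An ambient declaration along these lines would keep it type-safe; the file name is hypothetical and assumes `@cloudflare/workers-types` (or the types generated by the `cf-typegen` script) is available:

```typescript
// types/globals.d.ts - hypothetical ambient typing for the D1 binding
// that the Worker entry point is assumed to expose on globalThis.
import type { D1Database } from "@cloudflare/workers-types";

declare global {
  // Matches the "DB" binding referenced by the wrangler D1 scripts.
  var DB: D1Database | undefined;
}

export {};
```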


@@ -10,6 +10,41 @@ interface SessionCreateData {
  [key: string]: unknown;
}

/**
 * Fetches transcript content from a URL with optional authentication
 * @param url The URL to fetch the transcript from
 * @param username Optional username for Basic Auth
 * @param password Optional password for Basic Auth
 * @returns The transcript content or null if fetching fails
 */
async function fetchTranscriptContent(
  url: string,
  username?: string,
  password?: string
): Promise<string | null> {
  try {
    const authHeader =
      username && password
        ? "Basic " + Buffer.from(`${username}:${password}`).toString("base64")
        : undefined;

    const response = await fetch(url, {
      headers: authHeader ? { Authorization: authHeader } : {},
    });

    if (!response.ok) {
      process.stderr.write(
        `Error fetching transcript: ${response.statusText}\n`
      );
      return null;
    }

    return await response.text();
  } catch (error) {
    process.stderr.write(`Failed to fetch transcript: ${error}\n`);
    return null;
  }
}

export function startScheduler() {
  cron.schedule("*/15 * * * *", async () => {
    const companies = await prisma.company.findMany();

@@ -23,6 +58,16 @@
      await prisma.session.deleteMany({ where: { companyId: company.id } });

      for (const session of sessions) {
        // Fetch transcript content if URL is available
        let transcriptContent: string | null = null;
        if (session.fullTranscriptUrl) {
          transcriptContent = await fetchTranscriptContent(
            session.fullTranscriptUrl,
            company.csvUsername as string | undefined,
            company.csvPassword as string | undefined
          );
        }

        const sessionData: SessionCreateData = {
          ...session,
          companyId: company.id,

@@ -51,6 +96,8 @@
            ? session.messagesSent
            : 0,
          category: session.category || null,
          fullTranscriptUrl: session.fullTranscriptUrl || null,
          transcriptContent: transcriptContent, // Add the transcript content
        },
      });
    }


@@ -0,0 +1,54 @@
-- Initial database schema for LiveDash-Node
-- This combines the init migration and transcript_content addition
-- CreateTable
CREATE TABLE "Company" (
"id" TEXT NOT NULL PRIMARY KEY,
"name" TEXT NOT NULL,
"csvUrl" TEXT NOT NULL,
"csvUsername" TEXT,
"csvPassword" TEXT,
"sentimentAlert" REAL,
"dashboardOpts" TEXT,
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" DATETIME NOT NULL
);
-- CreateTable
CREATE TABLE "User" (
"id" TEXT NOT NULL PRIMARY KEY,
"email" TEXT NOT NULL,
"password" TEXT NOT NULL,
"companyId" TEXT NOT NULL,
"role" TEXT NOT NULL,
"resetToken" TEXT,
"resetTokenExpiry" DATETIME,
CONSTRAINT "User_companyId_fkey" FOREIGN KEY ("companyId") REFERENCES "Company" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);
-- CreateTable
CREATE TABLE "Session" (
"id" TEXT NOT NULL PRIMARY KEY,
"companyId" TEXT NOT NULL,
"startTime" DATETIME NOT NULL,
"endTime" DATETIME NOT NULL,
"ipAddress" TEXT,
"country" TEXT,
"language" TEXT,
"messagesSent" INTEGER,
"sentiment" REAL,
"escalated" BOOLEAN,
"forwardedHr" BOOLEAN,
"fullTranscriptUrl" TEXT,
"transcriptContent" TEXT,
"avgResponseTime" REAL,
"tokens" INTEGER,
"tokensEur" REAL,
"category" TEXT,
"initialMsg" TEXT,
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
CONSTRAINT "Session_companyId_fkey" FOREIGN KEY ("companyId") REFERENCES "Company" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);
-- CreateIndex
CREATE UNIQUE INDEX "User_email_key" ON "User"("email");

package-lock.json (generated, 9506 lines changed)

File diff suppressed because it is too large.

@@ -3,29 +3,64 @@
  "type": "module",
  "version": "0.2.0",
  "private": true,
  "scripts": {
    "build": "next build",
    "format": "pnpm dlx prettier --write .",
    "format:check": "pnpm dlx prettier --check .",
    "format:standard": "pnpm dlx standard . --fix",
    "lint": "next lint",
    "lint:fix": "pnpm dlx eslint --fix",
    "prisma:generate": "prisma generate",
    "prisma:migrate": "prisma migrate dev",
    "prisma:seed": "node prisma/seed.mjs",
    "prisma:studio": "prisma studio",
    "start": "next start",
    "lint:md": "markdownlint-cli2 \"**/*.md\" \"!.trunk/**\" \"!.venv/**\" \"!node_modules/**\"",
    "lint:md:fix": "markdownlint-cli2 --fix \"**/*.md\" \"!.trunk/**\" \"!.venv/**\" \"!node_modules/**\"",
    "cf-typegen": "wrangler types",
    "check": "tsc && wrangler deploy --dry-run",
    "deploy": "wrangler deploy",
    "dev": "next dev",
    "dev:old": "next dev --turbopack",
    "dev:cf": "wrangler dev",
    "predeploy": "wrangler d1 migrations apply DB --remote",
    "seedLocalD1": "wrangler d1 migrations apply DB --local",
    "d1:list": "wrangler d1 list",
    "d1:info": "wrangler d1 info d1-notso-livedash",
    "d1:info:remote": "wrangler d1 info d1-notso-livedash --remote",
    "d1:query": "node scripts/d1-query.js",
    "d1:export": "wrangler d1 export d1-notso-livedash",
    "d1:export:remote": "wrangler d1 export d1-notso-livedash --remote",
    "d1:backup": "wrangler d1 export d1-notso-livedash --output backups/$(date +%Y%m%d_%H%M%S)_backup.sql",
    "d1:schema": "wrangler d1 export d1-notso-livedash --no-data --output schema.sql",
    "d1": "node scripts/d1.js"
  },
  "dependencies": {
    "@prisma/adapter-d1": "^6.8.2",
    "@prisma/client": "^6.8.2",
    "@rapideditor/country-coder": "^5.4.0",
    "@types/d3": "^7.4.3",
    "@types/d3-cloud": "^1.2.9",
    "@types/d3-selection": "^3.0.11",
    "@types/geojson": "^7946.0.16",
    "@types/leaflet": "^1.9.18",
    "@types/node-fetch": "^2.6.12",
    "bcryptjs": "^3.0.2",
    "chart.js": "^4.4.9",
    "chartjs-plugin-annotation": "^3.1.0",
    "csv-parse": "^5.6.0",
    "d3": "^7.9.0",
    "d3-cloud": "^1.2.7",
    "d3-selection": "^3.0.0",
    "i18n-iso-countries": "^7.14.0",
    "iso-639-1": "^3.1.5",
    "leaflet": "^1.9.4",
    "next": "^15.3.3",
    "next-auth": "^4.24.11",
    "node-cron": "^4.1.0",
    "node-fetch": "^3.3.2",
    "react": "^19.1.0",
    "react-chartjs-2": "^5.3.0",
    "react-dom": "^19.1.0",
    "react-leaflet": "^5.0.0",
    "react-markdown": "^10.1.0",

@@ -33,42 +68,28 @@
  },
  "devDependencies": {
    "@eslint/eslintrc": "^3.3.1",
    "@eslint/js": "^9.28.0",
    "@playwright/test": "^1.52.0",
    "@tailwindcss/postcss": "^4.1.8",
    "@types/bcryptjs": "^2.4.6",
    "@types/node": "^22.15.29",
    "@types/node-cron": "^3.0.11",
    "@types/react": "^19.1.6",
    "@types/react-dom": "^19.1.5",
    "@typescript-eslint/eslint-plugin": "^8.33.0",
    "@typescript-eslint/parser": "^8.33.0",
    "eslint": "^9.28.0",
    "eslint-config-next": "^15.3.3",
    "eslint-plugin-prettier": "^5.4.1",
    "markdownlint-cli2": "^0.18.1",
    "postcss": "^8.5.4",
    "prettier": "^3.5.3",
    "prettier-plugin-jinja-template": "^2.1.0",
    "prisma": "^6.8.2",
    "tailwindcss": "^4.1.8",
    "ts-node": "^10.9.2",
    "typescript": "^5.8.3",
    "wrangler": "4.18.0"
  },
  "prettier": {
    "bracketSpacing": true,

@@ -118,5 +139,22 @@
      ".git",
      "*.json"
    ]
  },
  "cloudflare": {
    "label": "Worker + D1 Database",
    "products": [
      "Workers",
      "D1"
    ],
    "categories": [
      "storage"
    ],
    "icon_urls": [
      "https://imagedelivery.net/wSMYJvS3Xw-n339CbDyDIA/c6fc5da3-1e0a-4608-b2f1-9628577ec800/public",
      "https://imagedelivery.net/wSMYJvS3Xw-n339CbDyDIA/5ca0ca32-e897-4699-d4c1-6b680512f000/public"
    ],
    "docs_url": "https://developers.cloudflare.com/d1/",
    "preview_image_url": "https://imagedelivery.net/wSMYJvS3Xw-n339CbDyDIA/cb7cb0a9-6102-4822-633c-b76b7bb25900/public",
    "publish": true
  }
}


@@ -12,13 +12,27 @@ interface SessionCreateData {
}

/**
 * Fetches transcript content from a URL with optional authentication
 * @param url The URL to fetch the transcript from
 * @param username Optional username for Basic Auth
 * @param password Optional password for Basic Auth
 * @returns The transcript content or null if fetching fails
 */
async function fetchTranscriptContent(
  url: string,
  username?: string,
  password?: string
): Promise<string | null> {
  try {
    const authHeader =
      username && password
        ? "Basic " + Buffer.from(`${username}:${password}`).toString("base64")
        : undefined;

    const response = await fetch(url, {
      headers: authHeader ? { Authorization: authHeader } : {},
    });

    if (!response.ok) {
      process.stderr.write(
        `Error fetching transcript: ${response.statusText}\n`

@@ -111,7 +125,9 @@ export default async function handler(
      let transcriptContent: string | null = null;
      if (session.fullTranscriptUrl) {
        transcriptContent = await fetchTranscriptContent(
          session.fullTranscriptUrl,
          company.csvUsername as string | undefined,
          company.csvPassword as string | undefined
        );
      }

pnpm-lock.yaml (generated, new file, 6883 lines)

File diff suppressed because it is too large.

@@ -1,11 +1,12 @@

// Database schema, one company = one org, linked to users and CSV config
generator client {
  provider = "prisma-client-js"
  previewFeatures = ["driverAdapters"]
}

datasource db {
  provider = "sqlite"
  url = env("DATABASE_URL")
}

model Company {
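With the datasource now reading `env("DATABASE_URL")`, local Prisma commands need the variable defined; mirroring the previously hard-coded path, a local `.env` would presumably contain:

```bash
# .env (local development only; .env is already gitignored)
DATABASE_URL="file:./dev.db"
```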

scripts/d1-manager.js (new file, 184 lines)

@@ -0,0 +1,184 @@
#!/usr/bin/env node
/**
* Comprehensive D1 Database Management Script
*
* Usage Examples:
* node scripts/d1-manager.js tables
* node scripts/d1-manager.js schema Company
* node scripts/d1-manager.js count User
* node scripts/d1-manager.js query "SELECT * FROM User LIMIT 5"
* node scripts/d1-manager.js backup
* node scripts/d1-manager.js --remote query "SELECT COUNT(*) FROM Session"
*/
import { execSync } from 'child_process';
import { writeFileSync, mkdirSync } from 'fs';
import { join } from 'path';
const DB_NAME = 'd1-notso-livedash';
const args = process.argv.slice(2);
// Parse flags
const isRemote = args.includes('--remote');
const filteredArgs = args.filter(arg => !arg.startsWith('--'));
if (filteredArgs.length === 0) {
showHelp();
process.exit(1);
}
const command = filteredArgs[ 0 ];
const params = filteredArgs.slice(1);
function showHelp() {
console.log(`
🗄️ D1 Database Manager for ${DB_NAME}
Usage: node scripts/d1-manager.js [--remote] <command> [params...]
Commands:
info Show database information
tables List all tables
schema <table> Show table schema
count <table> Count rows in table
query "<sql>" Execute custom SQL query
backup [filename] Export database to SQL file
backup-schema Export just the schema
recent-logs Show recent query activity
Flags:
--remote Execute against remote D1 (production)
Examples:
node scripts/d1-manager.js tables
node scripts/d1-manager.js schema User
node scripts/d1-manager.js count Company
node scripts/d1-manager.js query "SELECT * FROM User WHERE role = 'admin'"
node scripts/d1-manager.js backup
node scripts/d1-manager.js --remote info
`);
}
function execute(sql, silent = false) {
const remoteFlag = isRemote ? '--remote' : '';
const cmd = `npx wrangler d1 execute ${DB_NAME} ${remoteFlag} --command "${sql}"`;
if (!silent) {
console.log(`🔍 Executing${isRemote ? ' (remote)' : ' (local)'}: ${sql}\\n`);
}
try {
return execSync(cmd, { encoding: 'utf8' });
} catch (error) {
console.error('❌ Query failed:', error.message);
process.exit(1);
}
}
function wranglerCommand(subcommand, silent = false) {
const remoteFlag = isRemote ? '--remote' : '';
const cmd = `npx wrangler d1 ${subcommand} ${DB_NAME} ${remoteFlag}`;
if (!silent) {
console.log(`📊 Running: ${cmd}\\n`);
}
try {
return execSync(cmd, { stdio: 'inherit' });
} catch (error) {
console.error('❌ Command failed:', error.message);
process.exit(1);
}
}
switch (command) {
case 'info':
wranglerCommand('info');
break;
case 'tables':
console.log('📋 Listing all tables:\\n');
execute("SELECT name, type FROM sqlite_master WHERE type IN ('table', 'view') AND name NOT LIKE 'sqlite_%' ORDER BY name;");
break;
case 'schema':
if (!params[ 0 ]) {
console.error('❌ Please specify a table name');
console.log('Usage: node scripts/d1-manager.js schema <table_name>');
process.exit(1);
}
console.log(`🏗️ Schema for table '${params[ 0 ]}':\\n`);
execute(`PRAGMA table_info(${params[ 0 ]});`);
break;
case 'count':
if (!params[ 0 ]) {
console.error('❌ Please specify a table name');
console.log('Usage: node scripts/d1-manager.js count <table_name>');
process.exit(1);
}
console.log(`🔢 Row count for table '${params[ 0 ]}':\\n`);
execute(`SELECT COUNT(*) as row_count FROM ${params[ 0 ]};`);
break;
case 'query':
if (!params[ 0 ]) {
console.error('❌ Please specify a SQL query');
console.log('Usage: node scripts/d1-manager.js query "SELECT * FROM table"');
process.exit(1);
}
execute(params[ 0 ]);
break;
case 'backup':
const timestamp = new Date().toISOString().replace(/[:.]/g, '-').slice(0, 19);
const filename = params[ 0 ] || `backup_${timestamp}.sql`;
try {
mkdirSync('backups', { recursive: true });
} catch (e) {
// Directory might already exist
}
const backupPath = join('backups', filename);
console.log(`💾 Creating backup: ${backupPath}\\n`);
wranglerCommand(`export --output ${backupPath}`);
console.log(`\\n✅ Backup created successfully: ${backupPath}`);
break;
case 'backup-schema':
try {
mkdirSync('backups', { recursive: true });
} catch (e) {
// Directory might already exist
}
console.log('📜 Exporting schema only...\\n');
wranglerCommand('export --no-data --output backups/schema.sql');
console.log('\\n✅ Schema exported to backups/schema.sql');
break;
case 'recent-logs':
console.log('📊 Recent database activity:\\n');
try {
wranglerCommand('insights');
} catch (error) {
console.log(' Insights not available for this database');
}
break;
case 'all-tables-info':
console.log('📊 Information about all tables:\\n');
const tables = [ 'Company', 'User', 'Session' ];
for (const table of tables) {
console.log(`\\n🏷 Table: ${table}`);
console.log('─'.repeat(50));
execute(`SELECT COUNT(*) as row_count FROM ${table};`);
}
break;
default:
console.error(`❌ Unknown command: ${command}`);
showHelp();
process.exit(1);
}

36
scripts/d1-query.js Normal file
View File

@ -0,0 +1,36 @@
#!/usr/bin/env node
/**
* Simple D1 query helper script
* Usage: node scripts/d1-query.js "SELECT * FROM User LIMIT 5"
* Usage: node scripts/d1-query.js --remote "SELECT COUNT(*) FROM Company"
*/
import { execSync } from 'child_process';
const args = process.argv.slice(2);
if (args.length === 0) {
console.log('Usage: node scripts/d1-query.js [--remote] "SQL_QUERY"');
console.log('Examples:');
console.log(' node scripts/d1-query.js "SELECT * FROM User LIMIT 5"');
console.log(' node scripts/d1-query.js --remote "SELECT COUNT(*) FROM Company"');
process.exit(1);
}
const isRemote = args.includes('--remote');
const query = args[ args.length - 1 ];
if (!query || query.startsWith('--')) {
console.error('Error: Please provide a SQL query');
process.exit(1);
}
const remoteFlag = isRemote ? '--remote' : '';
const command = `npx wrangler d1 execute d1-notso-livedash ${remoteFlag} --command "${query}"`;
try {
console.log(`🔍 Executing${isRemote ? ' (remote)' : ' (local)'}: ${query}\n`);
execSync(command, { stdio: 'inherit' });
} catch (error) {
console.error('Query failed:', error.message);
process.exit(1);
}

89
scripts/d1.js Normal file
View File

@ -0,0 +1,89 @@
#!/usr/bin/env node
/**
* Simple D1 Database CLI
* Usage: node scripts/d1.js <command> [args...]
*/
import { execSync } from 'child_process';
const DB_NAME = 'd1-notso-livedash';
const args = process.argv.slice(2);
if (args.length === 0) {
console.log(`
🗄️ Simple D1 CLI for ${DB_NAME}
Usage: node scripts/d1.js <command> [args...]
Commands:
list List databases
info Show database info
tables List all tables
schema <table> Show table schema
query "<sql>" Execute SQL query
export [file] Export database
Add --remote flag for production database
Examples:
node scripts/d1.js tables
node scripts/d1.js schema User
node scripts/d1.js query "SELECT COUNT(*) FROM Company"
node scripts/d1.js --remote info
`);
process.exit(0);
}
const isRemote = args.includes('--remote');
const filteredArgs = args.filter(arg => !arg.startsWith('--'));
const [ command, ...params ] = filteredArgs;
const remoteFlag = isRemote ? '--remote' : '';
function run(cmd) {
try {
console.log(`💫 ${cmd}`);
execSync(cmd, { stdio: 'inherit' });
} catch (error) {
console.error('❌ Command failed');
process.exit(1);
}
}
switch (command) {
case 'list':
run('npx wrangler d1 list');
break;
case 'info':
run(`npx wrangler d1 info ${DB_NAME} ${remoteFlag}`);
break;
case 'tables':
run(`npx wrangler d1 execute ${DB_NAME} ${remoteFlag} --command "SELECT name FROM sqlite_master WHERE type='table' ORDER BY name"`);
break;
case 'schema':
if (!params[ 0 ]) {
console.error('❌ Please specify table name');
process.exit(1);
}
run(`npx wrangler d1 execute ${DB_NAME} ${remoteFlag} --command "PRAGMA table_info(${params[ 0 ]})"`);
break;
case 'query':
if (!params[ 0 ]) {
console.error('❌ Please specify SQL query');
process.exit(1);
}
run(`npx wrangler d1 execute ${DB_NAME} ${remoteFlag} --command "${params[ 0 ]}"`);
break;
case 'export':
const filename = params[ 0 ] || `backup_${new Date().toISOString().slice(0, 10)}.sql`;
run(`npx wrangler d1 export ${DB_NAME} ${remoteFlag} --output ${filename}`);
break;
default:
console.error(`❌ Unknown command: ${command}`);
process.exit(1);
}

View File

@ -16,6 +16,7 @@ async function main() {
     select: {
       id: true,
       fullTranscriptUrl: true,
+      companyId: true,
     },
   });
@ -28,47 +29,94 @@ async function main() {
   let successCount = 0;
   let errorCount = 0;
 
+  // Group sessions by company to fetch credentials once per company
+  const sessionsByCompany = new Map<string, typeof sessionsToUpdate>();
   for (const session of sessionsToUpdate) {
-    if (!session.fullTranscriptUrl) {
-      // Should not happen due to query, but good for type safety
-      console.warn(`Session ${session.id} has no fullTranscriptUrl, skipping.`);
+    if (!sessionsByCompany.has(session.companyId)) {
+      sessionsByCompany.set(session.companyId, []);
+    }
+    sessionsByCompany.get(session.companyId)!.push(session);
+  }
+
+  for (const [companyId, companySessions] of Array.from(
+    sessionsByCompany.entries()
+  )) {
+    // Fetch company credentials once per company
+    const company = await prisma.company.findUnique({
+      where: { id: companyId },
+      select: {
+        csvUsername: true,
+        csvPassword: true,
+        name: true,
+      },
+    });
+
+    if (!company) {
+      console.warn(`Company ${companyId} not found, skipping sessions.`);
+      errorCount += companySessions.length;
       continue;
     }
+
     console.log(
-      `Fetching transcript for session ${session.id} from ${session.fullTranscriptUrl}...`
+      `Processing ${companySessions.length} sessions for company: ${company.name}`
     );
-    try {
-      const response = await fetch(session.fullTranscriptUrl);
-      if (!response.ok) {
-        console.error(
-          `Failed to fetch transcript for session ${session.id}: ${response.status} ${response.statusText}`
+
+    for (const session of companySessions) {
+      if (!session.fullTranscriptUrl) {
+        // Should not happen due to query, but good for type safety
+        console.warn(
+          `Session ${session.id} has no fullTranscriptUrl, skipping.`
         );
-        const errorBody = await response.text();
-        console.error(`Error details: ${errorBody.substring(0, 500)}`); // Log first 500 chars of error
-        errorCount++;
         continue;
       }
-      const transcriptText = await response.text();
-      if (transcriptText.trim() === "") {
-        console.warn(
-          `Fetched empty transcript for session ${session.id}. Storing as empty string.`
-        );
-      }
-      await prisma.session.update({
-        where: { id: session.id },
-        data: { transcriptContent: transcriptText },
-      });
+
       console.log(
-        `Successfully fetched and stored transcript for session ${session.id}.`
+        `Fetching transcript for session ${session.id} from ${session.fullTranscriptUrl}...`
       );
-      successCount++;
-    } catch (error) {
-      console.error(`Error processing session ${session.id}:`, error);
-      errorCount++;
+
+      try {
+        // Prepare authentication if credentials are available
+        const authHeader =
+          company.csvUsername && company.csvPassword
+            ? "Basic " +
+              Buffer.from(
+                `${company.csvUsername}:${company.csvPassword}`
+              ).toString("base64")
+            : undefined;
+
+        const response = await fetch(session.fullTranscriptUrl, {
+          headers: authHeader ? { Authorization: authHeader } : {},
+        });
+
+        if (!response.ok) {
+          console.error(
+            `Failed to fetch transcript for session ${session.id}: ${response.status} ${response.statusText}`
+          );
+          const errorBody = await response.text();
+          console.error(`Error details: ${errorBody.substring(0, 500)}`); // Log first 500 chars of error
+          errorCount++;
+          continue;
+        }
+
+        const transcriptText = await response.text();
+        if (transcriptText.trim() === "") {
+          console.warn(
+            `Fetched empty transcript for session ${session.id}. Storing as empty string.`
+          );
+        }
+
+        await prisma.session.update({
+          where: { id: session.id },
+          data: { transcriptContent: transcriptText },
+        });
+
+        console.log(
+          `Successfully fetched and stored transcript for session ${session.id}.`
+        );
+        successCount++;
+      } catch (error) {
+        console.error(`Error processing session ${session.id}:`, error);
+        errorCount++;
+      }
     }
   }

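The core of this hunk is the optional HTTP Basic auth added around the transcript fetch. Distilled into a standalone sketch (the helper name and signature are illustrative, not part of the diff; runs on Node 18+ with global fetch and Buffer):

// Build an optional Basic auth header from stored credentials, fetch the URL,
// and return the response body as text.
async function fetchWithOptionalBasicAuth(
  url: string,
  username?: string | null,
  password?: string | null
): Promise<string> {
  const authHeader =
    username && password
      ? "Basic " + Buffer.from(`${username}:${password}`).toString("base64")
      : undefined;
  const response = await fetch(url, {
    headers: authHeader ? { Authorization: authHeader } : {},
  });
  if (!response.ok) {
    throw new Error(`Request failed: ${response.status} ${response.statusText}`);
  }
  return response.text();
}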
223
src/index.ts Normal file
View File

@ -0,0 +1,223 @@
// Cloudflare Worker entry point for LiveDash-Node
// This file handles requests when deployed to Cloudflare Workers
import { PrismaClient } from '@prisma/client';
import { PrismaD1 } from '@prisma/adapter-d1';
import { formatError } from './utils/error';
export interface Env {
DB: D1Database;
NEXTAUTH_SECRET?: string;
NEXTAUTH_URL?: string;
}
export default {
async fetch(request: Request, env: Env, ctx: ExecutionContext): Promise<Response> {
try {
// Initialize Prisma with D1 adapter
const adapter = new PrismaD1(env.DB);
const prisma = new PrismaClient({ adapter });
const url = new URL(request.url);
// CORS headers for all responses
const corsHeaders = {
'Access-Control-Allow-Origin': '*',
'Access-Control-Allow-Methods': 'GET, POST, PUT, DELETE, OPTIONS',
'Access-Control-Allow-Headers': 'Content-Type, Authorization',
};
// Handle preflight requests
if (request.method === 'OPTIONS') {
return new Response(null, { headers: corsHeaders });
}
// Handle API routes
if (url.pathname.startsWith('/api/')) {
// Simple health check endpoint
if (url.pathname === '/api/health') {
const companyCount = await prisma.company.count();
const sessionCount = await prisma.session.count();
return new Response(
JSON.stringify({
status: 'healthy',
database: 'connected',
companies: companyCount,
sessions: sessionCount,
timestamp: new Date().toISOString()
}),
{
headers: {
'Content-Type': 'application/json',
...corsHeaders
},
}
);
}
// Test metrics endpoint
if (url.pathname === '/api/test-metrics') {
const sessions = await prisma.session.findMany({
take: 10,
orderBy: { startTime: 'desc' }
});
return new Response(
JSON.stringify({
message: 'LiveDash API running on Cloudflare Workers with D1',
recentSessions: sessions.length,
sessions: sessions
}),
{
headers: {
'Content-Type': 'application/json',
...corsHeaders
},
}
);
}
// For other API routes, return a placeholder response
return new Response(
JSON.stringify({
message: 'API endpoint not implemented in worker yet',
path: url.pathname,
method: request.method,
note: 'This endpoint needs to be migrated from Next.js API routes'
}),
{
status: 501,
headers: {
'Content-Type': 'application/json',
...corsHeaders
},
}
);
}
// Handle root path - simple test page
if (url.pathname === '/') {
try {
const companies = await prisma.company.findMany();
const recentSessions = await prisma.session.findMany({
take: 5,
orderBy: { startTime: 'desc' },
include: { company: { select: { name: true } } }
});
return new Response(
`
<!DOCTYPE html>
<html>
<head>
<title>LiveDash-Node on Cloudflare Workers</title>
<link rel="stylesheet" type="text/css" href="https://static.integrations.cloudflare.com/styles.css">
<style>
.container { max-width: 1000px; margin: 0 auto; padding: 20px; }
.grid { display: grid; grid-template-columns: 1fr 1fr; gap: 20px; margin: 20px 0; }
.card { background: #f8f9fa; padding: 20px; border-radius: 8px; border: 1px solid #e9ecef; }
pre { background: #f5f5f5; padding: 15px; border-radius: 5px; overflow-x: auto; font-size: 12px; }
.api-list { list-style: none; padding: 0; }
.api-list li { margin: 8px 0; }
.api-list a { color: #0066cc; text-decoration: none; }
.api-list a:hover { text-decoration: underline; }
.status { color: #28a745; font-weight: bold; }
</style>
</head>
<body>
<div class="container">
<header>
<img
src="https://imagedelivery.net/wSMYJvS3Xw-n339CbDyDIA/30e0d3f6-6076-40f8-7abb-8a7676f83c00/public"
/>
<h1>🎉 LiveDash-Node Successfully Connected to D1!</h1>
<p class="status">✓ Database Connected | ✓ Prisma Client Working | ✓ D1 Adapter Active</p>
</header>
<div class="grid">
<div class="card">
<h3>📊 Database Stats</h3>
<ul>
<li><strong>Companies:</strong> ${companies.length}</li>
<li><strong>Recent Sessions:</strong> ${recentSessions.length}</li>
</ul>
</div>
<div class="card">
<h3>🔗 Test API Endpoints</h3>
<ul class="api-list">
<li><a href="/api/health">/api/health</a> - Health check</li>
<li><a href="/api/test-metrics">/api/test-metrics</a> - Sample data</li>
</ul>
</div>
</div>
<div class="card">
<h3>🏢 Companies in Database</h3>
<pre>${companies.length > 0 ? JSON.stringify(companies, null, 2) : 'No companies found'}</pre>
</div>
<div class="card">
<h3>📈 Recent Sessions</h3>
<pre>${recentSessions.length > 0 ? JSON.stringify(recentSessions, null, 2) : 'No sessions found'}</pre>
</div>
<footer style="margin-top: 40px; text-align: center; color: #666;">
<small>
<a target="_blank" href="https://developers.cloudflare.com/d1/">Learn more about Cloudflare D1</a> |
<a target="_blank" href="https://www.prisma.io/docs/guides/deployment/deployment-guides/deploying-to-cloudflare-workers">Prisma + Workers Guide</a>
</small>
</footer>
</div>
</body>
</html>
`,
{
headers: {
'Content-Type': 'text/html',
...corsHeaders
},
}
);
} catch (dbError) {
return new Response(
`
<!DOCTYPE html>
<html>
<head><title>LiveDash-Node - Database Error</title></head>
<body>
<h1>❌ Database Connection Error</h1>
<p>Error: ${dbError instanceof Error ? dbError.message : 'Unknown database error'}</p>
<p>Check your D1 database configuration and make sure migrations have been applied.</p>
</body>
</html>
`,
{
status: 500,
headers: { 'Content-Type': 'text/html' },
}
);
}
}
// Handle all other routes
return new Response('Not Found - This endpoint is not available in the worker deployment', {
status: 404,
headers: corsHeaders
});
} catch (error) {
console.error('Worker error:', error);
return new Response(JSON.stringify(formatError(error)), {
status: 500,
headers: {
'Content-Type': 'application/json',
'Access-Control-Allow-Origin': '*'
}
});
}
},
};

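A quick way to exercise the endpoints above is a small smoke check against a locally running Worker. A minimal sketch, assuming `npx wrangler dev` is serving on its default port 8787 (this script is not part of the diff):

// Smoke-check the /api/health endpoint exposed by src/index.ts.
const res = await fetch("http://localhost:8787/api/health");
if (!res.ok) {
  throw new Error(`Health check failed: ${res.status} ${res.statusText}`);
}
const body = (await res.json()) as {
  status: string;
  database: string;
  companies: number;
  sessions: number;
  timestamp: string;
};
console.log(`status=${body.status} companies=${body.companies} sessions=${body.sessions}`);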
13
src/utils/error.ts Normal file
View File

@ -0,0 +1,13 @@
export function formatError(error: unknown): Record<string, unknown> {
const payload: Record<string, unknown> = {
error: 'Internal Server Error',
message: error instanceof Error ? error.message : 'Unknown error'
};
if (
typeof process !== 'undefined' &&
process.env?.NODE_ENV !== 'production'
) {
payload.stack = error instanceof Error ? error.stack : undefined;
}
return payload;
}

23
tests/formatError.test.ts Normal file
View File

@ -0,0 +1,23 @@
import { test } from 'node:test';
import assert from 'node:assert';
import { formatError } from '../src/utils/error';
const originalEnv = process.env.NODE_ENV;
test('includes stack when not in production', () => {
delete process.env.NODE_ENV;
const err = new Error('boom');
const payload = formatError(err);
assert.ok('stack' in payload);
});
test('omits stack in production', () => {
process.env.NODE_ENV = 'production';
const err = new Error('boom');
const payload = formatError(err);
assert.ok(!('stack' in payload));
});
test.after(() => {
if (originalEnv === undefined) delete process.env.NODE_ENV; else process.env.NODE_ENV = originalEnv;
});

View File

@ -8,9 +8,11 @@
"jsx": "preserve", "jsx": "preserve",
"lib": ["dom", "dom.iterable", "esnext"], "lib": ["dom", "dom.iterable", "esnext"],
"module": "esnext", "module": "esnext",
"moduleResolution": "node", "moduleResolution": "node", // bundler
"noEmit": true, "noEmit": true,
"noImplicitAny": false, // Allow implicit any types "noImplicitAny": false, // Allow implicit any types
"preserveSymlinks": false,
"types": ["./worker-configuration.d.ts"],
"paths": { "paths": {
"@/*": ["./*"] "@/*": ["./*"]
}, },
@ -23,10 +25,11 @@
"skipLibCheck": true, "skipLibCheck": true,
"strict": true, "strict": true,
"strictNullChecks": true, "strictNullChecks": true,
"target": "es5" "target": "ESNext"
}, },
"exclude": ["node_modules"], "exclude": ["node_modules"],
"include": [ "include": [
"src",
"next-env.d.ts", "next-env.d.ts",
"**/*.ts", "**/*.ts",
"**/*.tsx", "**/*.tsx",

6870
worker-configuration.d.ts vendored Normal file

File diff suppressed because it is too large

55
wrangler.json Normal file
View File

@ -0,0 +1,55 @@
/**
* For more details on how to configure Wrangler, refer to:
* https://developers.cloudflare.com/workers/wrangler/configuration/
*/
{
"$schema": "node_modules/wrangler/config-schema.json",
"compatibility_date": "2025-04-01",
"main": "src/index.ts",
"name": "livedash",
"upload_source_maps": true,
"d1_databases": [
{
"binding": "DB",
"database_id": "d4ee7efe-d37a-48e4-bed7-fdfaa5108131",
"database_name": "d1-notso-livedash"
}
],
"observability": {
"enabled": true
}
/**
* Smart Placement
* Docs: https://developers.cloudflare.com/workers/configuration/smart-placement/#smart-placement
*/
// "placement": { "mode": "smart" },
/**
* Bindings
* Bindings allow your Worker to interact with resources on the Cloudflare Developer Platform, including
* databases, object storage, AI inference, real-time communication and more.
* https://developers.cloudflare.com/workers/runtime-apis/bindings/
*/
/**
* Environment Variables
* https://developers.cloudflare.com/workers/wrangler/configuration/#environment-variables
*/
// "vars": { "MY_VARIABLE": "production_value" },
/**
* Note: Use secrets to store sensitive data.
* https://developers.cloudflare.com/workers/configuration/secrets/
*/
/**
* Static Assets
* https://developers.cloudflare.com/workers/static-assets/binding/
*/
// "assets": { "directory": "./public/", "binding": "ASSETS" },
/**
* Service Bindings (communicate between multiple Workers)
* https://developers.cloudflare.com/workers/wrangler/configuration/#service-bindings
*/
// "services": [{ "binding": "MY_SERVICE", "service": "my-service" }]
}
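For reference, a sketch of how the optional bindings described in the comments above would surface on the Worker's Env if they were enabled. The names mirror the commented-out placeholders (MY_VARIABLE, ASSETS, MY_SERVICE); none of them exist in this configuration yet, and the D1Database/Fetcher types come from the generated worker-configuration.d.ts:

// Hypothetical Env shape once the commented-out sections are enabled.
interface EnvWithOptionalBindings {
  DB: D1Database; // existing binding from "d1_databases"
  MY_VARIABLE?: string; // from "vars"
  ASSETS?: Fetcher; // from "assets"
  MY_SERVICE?: Fetcher; // from "services"
}

export default {
  async fetch(request: Request, env: EnvWithOptionalBindings): Promise<Response> {
    // Static assets and service bindings are both consumed via fetch().
    if (env.ASSETS) {
      return env.ASSETS.fetch(request);
    }
    return new Response(env.MY_VARIABLE ?? "MY_VARIABLE not configured");
  },
};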