From 71c8aff125c469364def509120a517938ad901d6 Mon Sep 17 00:00:00 2001 From: Kaj Kowalski Date: Sun, 1 Jun 2025 05:22:44 +0200 Subject: [PATCH] Implement Cloudflare D1 support with Prisma, update scripts, and enhance documentation --- docs/D1_CLI_ACCESS.md | 227 ++++++++++++++++++++++++++++ lib/prisma.ts | 25 +++- migrations/0001_initial_schema.sql | 54 +++++++ package.json | 17 ++- pnpm-lock.yaml | 30 ++++ prisma/schema.prisma | 3 +- scripts/d1-manager.js | 184 +++++++++++++++++++++++ scripts/d1-query.js | 36 +++++ scripts/d1.js | 89 +++++++++++ src/index.ts | 228 +++++++++++++++++++++++++++++ wrangler.json | 2 +- 11 files changed, 884 insertions(+), 11 deletions(-) create mode 100644 docs/D1_CLI_ACCESS.md create mode 100644 migrations/0001_initial_schema.sql create mode 100644 scripts/d1-manager.js create mode 100644 scripts/d1-query.js create mode 100644 scripts/d1.js create mode 100644 src/index.ts diff --git a/docs/D1_CLI_ACCESS.md b/docs/D1_CLI_ACCESS.md new file mode 100644 index 0000000..c3142a4 --- /dev/null +++ b/docs/D1_CLI_ACCESS.md @@ -0,0 +1,227 @@ +# D1 Database Command Line Access + +This guide shows you how to access and manage your Cloudflare D1 database `d1-notso-livedash` from the command line. + +## Quick Reference + +### Using the Custom D1 CLI Script + +```bash +# Simple and fast commands +pnpm d1 tables # List all tables +pnpm d1 info # Database information +pnpm d1 schema User # Show table schema +pnpm d1 query "SELECT COUNT(*) FROM User" # Execute SQL +pnpm d1 export backup.sql # Export database + +# Remote (production) commands +pnpm d1 --remote info # Production database info +pnpm d1 --remote query "SELECT * FROM Company LIMIT 5" +``` + +### Using Package.json Scripts + +```bash +# Database information +pnpm d1:list # List all D1 databases +pnpm d1:info # Local database info +pnpm d1:info:remote # Remote database info + +# Backup and export +pnpm d1:export # Export local database +pnpm d1:export:remote # Export remote database +pnpm d1:schema # Export schema only +``` + +### Direct Wrangler Commands + +```bash +# Basic operations +npx wrangler d1 list +npx wrangler d1 info d1-notso-livedash +npx wrangler d1 execute d1-notso-livedash --command "SELECT * FROM User" + +# Remote operations (add --remote flag) +npx wrangler d1 info d1-notso-livedash --remote +npx wrangler d1 execute d1-notso-livedash --remote --command "SELECT COUNT(*) FROM Company" +``` + +## Database Schema + +Your D1 database contains these tables: + +### Company Table + +```sql +- id (TEXT, PRIMARY KEY) +- name (TEXT, NOT NULL) +- csvUrl (TEXT, NOT NULL) +- csvUsername (TEXT) +- csvPassword (TEXT) +- sentimentAlert (REAL) +- dashboardOpts (TEXT) +- createdAt (DATETIME, NOT NULL, DEFAULT CURRENT_TIMESTAMP) +- updatedAt (DATETIME, NOT NULL) +``` + +### User Table + +```sql +- id (TEXT, PRIMARY KEY) +- email (TEXT, NOT NULL) +- password (TEXT, NOT NULL) +- companyId (TEXT, NOT NULL) +- role (TEXT, NOT NULL) +- resetToken (TEXT) +- resetTokenExpiry (DATETIME) +``` + +### Session Table + +```sql +- id (TEXT, PRIMARY KEY) +- userId (TEXT, NOT NULL) +- expiresAt (DATETIME, NOT NULL) +``` + +## Common SQL Queries + +### Data Exploration + +```sql +-- Check table sizes +SELECT 'Company' as table_name, COUNT(*) as count FROM Company +UNION ALL +SELECT 'User' as table_name, COUNT(*) as count FROM User +UNION ALL +SELECT 'Session' as table_name, COUNT(*) as count FROM Session; + +-- Show all table names +SELECT name FROM sqlite_master WHERE type='table' ORDER BY name; + +-- Get table schema +PRAGMA 
table_info(User); +``` + +### Business Queries + +```sql +-- List companies with user counts +SELECT c.name, c.id, COUNT(u.id) as user_count +FROM Company c +LEFT JOIN User u ON c.id = u.companyId +GROUP BY c.id, c.name; + +-- Find admin users +SELECT u.email, c.name as company +FROM User u +JOIN Company c ON u.companyId = c.id +WHERE u.role = 'admin'; + +-- Active sessions +SELECT COUNT(*) as active_sessions +FROM Session +WHERE expiresAt > datetime('now'); +``` + +## Local vs Remote Databases + +- **Local Database**: Located at `.wrangler/state/v3/d1/` (for development) +- **Remote Database**: Cloudflare's production D1 database + +### When to Use Each: + +- **Local**: Development, testing, safe experimentation +- **Remote**: Production data, deployment verification + +## Database Statistics + +Current database info: + +- **Database ID**: d4ee7efe-d37a-48e4-bed7-fdfaa5108131 +- **Region**: WEUR (Western Europe) +- **Size**: ~53.2 kB +- **Tables**: 6 (including system tables) +- **Read Queries (24h)**: 65 +- **Write Queries (24h)**: 8 + +## Scripts Available + +### `/scripts/d1.js` (Recommended) + +Simple, fast CLI for common operations: + +```bash +node scripts/d1.js tables +node scripts/d1.js schema User +node scripts/d1.js query "SELECT * FROM Company" +node scripts/d1.js --remote info +``` + +### `/scripts/d1-query.js` + +Simple query executor: + +```bash +node scripts/d1-query.js "SELECT COUNT(*) FROM User" +node scripts/d1-query.js --remote "SELECT * FROM Company" +``` + +### `/scripts/d1-manager.js` + +Comprehensive database management (if needed for advanced operations): + +```bash +node scripts/d1-manager.js info +node scripts/d1-manager.js backup +``` + +## Backup and Recovery + +### Create Backups + +```bash +# Quick backup +pnpm d1 export backup_$(date +%Y%m%d).sql + +# Automated backup with timestamp +npx wrangler d1 export d1-notso-livedash --output backups/backup_$(date +%Y%m%d_%H%M%S).sql + +# Schema only backup +npx wrangler d1 export d1-notso-livedash --no-data --output schema.sql +``` + +### Restore from Backup + +```bash +# Apply SQL file to database +npx wrangler d1 execute d1-notso-livedash --file backup.sql +``` + +## Troubleshooting + +### Common Issues + +1. **"wrangler not found"**: Use `npx wrangler` instead of `wrangler` +2. **Permission denied**: Ensure you're logged into Cloudflare: `npx wrangler login` +3. 
**Database not found**: Check `wrangler.json` for correct binding name + +### Debug Commands + +```bash +# Check Wrangler authentication +npx wrangler whoami + +# Verify database configuration +npx wrangler d1 list + +# Test database connectivity +npx wrangler d1 execute d1-notso-livedash --command "SELECT 1" +``` + +## Security Notes + +- Local database is for development only +- Never expose production database credentials +- Use `--remote` flag carefully in production +- Regular backups are recommended for production data diff --git a/lib/prisma.ts b/lib/prisma.ts index 6ba62e6..a87a72e 100644 --- a/lib/prisma.ts +++ b/lib/prisma.ts @@ -1,5 +1,6 @@ -// Simple Prisma client setup +// Prisma client setup with support for Cloudflare D1 import { PrismaClient } from "@prisma/client"; +import { PrismaD1 } from "@prisma/adapter-d1"; // Add prisma to the NodeJS global type // This approach avoids NodeJS.Global which is not available @@ -9,12 +10,24 @@ declare const global: { prisma: PrismaClient | undefined; }; -// Initialize Prisma Client -const prisma = global.prisma || new PrismaClient(); +// Check if we're running in Cloudflare Workers environment +const isCloudflareWorker = typeof globalThis.DB !== 'undefined'; -// Save in global if we're in development -if (process.env.NODE_ENV !== "production") { - global.prisma = prisma; +// Initialize Prisma Client +let prisma: PrismaClient; + +if (isCloudflareWorker) { + // In Cloudflare Workers, use D1 adapter + const adapter = new PrismaD1(globalThis.DB); + prisma = new PrismaClient({ adapter }); +} else { + // In Next.js/Node.js, use regular SQLite + prisma = global.prisma || new PrismaClient(); + + // Save in global if we're in development + if (process.env.NODE_ENV !== "production") { + global.prisma = prisma; + } } export { prisma }; diff --git a/migrations/0001_initial_schema.sql b/migrations/0001_initial_schema.sql new file mode 100644 index 0000000..d161ae0 --- /dev/null +++ b/migrations/0001_initial_schema.sql @@ -0,0 +1,54 @@ +-- Initial database schema for LiveDash-Node +-- This combines the init migration and transcript_content addition + +-- CreateTable +CREATE TABLE "Company" ( + "id" TEXT NOT NULL PRIMARY KEY, + "name" TEXT NOT NULL, + "csvUrl" TEXT NOT NULL, + "csvUsername" TEXT, + "csvPassword" TEXT, + "sentimentAlert" REAL, + "dashboardOpts" TEXT, + "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, + "updatedAt" DATETIME NOT NULL +); + +-- CreateTable +CREATE TABLE "User" ( + "id" TEXT NOT NULL PRIMARY KEY, + "email" TEXT NOT NULL, + "password" TEXT NOT NULL, + "companyId" TEXT NOT NULL, + "role" TEXT NOT NULL, + "resetToken" TEXT, + "resetTokenExpiry" DATETIME, + CONSTRAINT "User_companyId_fkey" FOREIGN KEY ("companyId") REFERENCES "Company" ("id") ON DELETE RESTRICT ON UPDATE CASCADE +); + +-- CreateTable +CREATE TABLE "Session" ( + "id" TEXT NOT NULL PRIMARY KEY, + "companyId" TEXT NOT NULL, + "startTime" DATETIME NOT NULL, + "endTime" DATETIME NOT NULL, + "ipAddress" TEXT, + "country" TEXT, + "language" TEXT, + "messagesSent" INTEGER, + "sentiment" REAL, + "escalated" BOOLEAN, + "forwardedHr" BOOLEAN, + "fullTranscriptUrl" TEXT, + "transcriptContent" TEXT, + "avgResponseTime" REAL, + "tokens" INTEGER, + "tokensEur" REAL, + "category" TEXT, + "initialMsg" TEXT, + "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, + CONSTRAINT "Session_companyId_fkey" FOREIGN KEY ("companyId") REFERENCES "Company" ("id") ON DELETE RESTRICT ON UPDATE CASCADE +); + +-- CreateIndex +CREATE UNIQUE INDEX "User_email_key" ON 
"User"("email"); diff --git a/package.json b/package.json index 0cfc998..cf3cb4b 100644 --- a/package.json +++ b/package.json @@ -5,7 +5,6 @@ "private": true, "scripts": { "build": "next build", - "dev:old": "next dev --turbopack", "format": "pnpm dlx prettier --write .", "format:check": "pnpm dlx prettier --check .", "format:standard": "pnpm dlx standard . --fix", @@ -21,11 +20,23 @@ "cf-typegen": "wrangler types", "check": "tsc && wrangler deploy --dry-run", "deploy": "wrangler deploy", - "dev": "wrangler dev", + "dev": "next dev", + "dev:old": "next dev --turbopack", + "dev:cf": "wrangler dev", "predeploy": "wrangler d1 migrations apply DB --remote", - "seedLocalD1": "wrangler d1 migrations apply DB --local" + "seedLocalD1": "wrangler d1 migrations apply DB --local", + "d1:list": "wrangler d1 list", + "d1:info": "wrangler d1 info d1-notso-livedash", + "d1:info:remote": "wrangler d1 info d1-notso-livedash --remote", + "d1:query": "node scripts/d1-query.js", + "d1:export": "wrangler d1 export d1-notso-livedash", + "d1:export:remote": "wrangler d1 export d1-notso-livedash --remote", + "d1:backup": "wrangler d1 export d1-notso-livedash --output backups/$(date +%Y%m%d_%H%M%S)_backup.sql", + "d1:schema": "wrangler d1 export d1-notso-livedash --no-data --output schema.sql", + "d1": "node scripts/d1.js" }, "dependencies": { + "@prisma/adapter-d1": "^6.8.2", "@prisma/client": "^6.8.2", "@rapideditor/country-coder": "^5.4.0", "@types/d3": "^7.4.3", diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 7c57b3b..7769b40 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -8,6 +8,9 @@ importers: .: dependencies: + '@prisma/adapter-d1': + specifier: ^6.8.2 + version: 6.8.2 '@prisma/client': specifier: ^6.8.2 version: 6.8.2(prisma@6.8.2(typescript@5.8.3))(typescript@5.8.3) @@ -220,6 +223,9 @@ packages: cpu: [x64] os: [win32] + '@cloudflare/workers-types@4.20250214.0': + resolution: {integrity: sha512-+M8oOFVbyXT5GeJrYLWMUGyPf5wGB4+k59PPqdedtOig7NjZ5r4S79wMdaZ/EV5IV8JPtZBSNjTKpDnNmfxjaQ==} + '@cspotcode/source-map-support@0.8.1': resolution: {integrity: sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==} engines: {node: '>=12'} @@ -779,6 +785,9 @@ packages: engines: {node: '>=18'} hasBin: true + '@prisma/adapter-d1@6.8.2': + resolution: {integrity: sha512-YAFT6y0E5WBOY6IeHbT2vORa8h6bpJTEBlGC3by96bcLDSVBA0vdgTkBgg+rFuO4zEN727dWR/IdOaW0/zsHZg==} + '@prisma/client@6.8.2': resolution: {integrity: sha512-5II+vbyzv4si6Yunwgkj0qT/iY0zyspttoDrL3R4BYgLdp42/d2C8xdi9vqkrYtKt9H32oFIukvyw3Koz5JoDg==} engines: {node: '>=18.18'} @@ -797,6 +806,9 @@ packages: '@prisma/debug@6.8.2': resolution: {integrity: sha512-4muBSSUwJJ9BYth5N8tqts8JtiLT8QI/RSAzEogwEfpbYGFo9mYsInsVo8dqXdPO2+Rm5OG5q0qWDDE3nyUbVg==} + '@prisma/driver-adapter-utils@6.8.2': + resolution: {integrity: sha512-5+CzN/41gBsRmA3ekbVy1TXnSImSPBtMlxWAttVH6tg94bv4zGGRmyk5tUCdT83nl0hG1Sq2oMXR7ml6aqILvw==} + '@prisma/engines-version@6.8.0-43.2060c79ba17c6bb9f5823312b6f6b7f4a845738e': resolution: {integrity: sha512-Rkik9lMyHpFNGaLpPF3H5q5TQTkm/aE7DsGM5m92FZTvWQsvmi6Va8On3pWvqLHOt5aPUvFb/FeZTmphI4CPiQ==} @@ -2293,6 +2305,10 @@ packages: keyv@4.5.4: resolution: {integrity: sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==} + ky@1.7.5: + resolution: {integrity: sha512-HzhziW6sc5m0pwi5M196+7cEBtbt0lCYi67wNsiwMUmz833wloE0gbzJPWKs1gliFKQb34huItDQX97LyOdPdA==} + engines: {node: '>=18'} + language-subtag-registry@0.3.23: resolution: {integrity: 
sha512-0K65Lea881pHotoGEa5gDlMxt3pctLi2RplBb7Ezh4rRdLEOtgi7n4EwK9lamnUCkKBqaeKRVebTq6BAxSkpXQ==} @@ -3353,6 +3369,8 @@ snapshots: '@cloudflare/workerd-windows-64@1.20250525.0': optional: true + '@cloudflare/workers-types@4.20250214.0': {} + '@cspotcode/source-map-support@0.8.1': dependencies: '@jridgewell/trace-mapping': 0.3.9 @@ -3750,6 +3768,12 @@ snapshots: dependencies: playwright: 1.52.0 + '@prisma/adapter-d1@6.8.2': + dependencies: + '@cloudflare/workers-types': 4.20250214.0 + '@prisma/driver-adapter-utils': 6.8.2 + ky: 1.7.5 + '@prisma/client@6.8.2(prisma@6.8.2(typescript@5.8.3))(typescript@5.8.3)': optionalDependencies: prisma: 6.8.2(typescript@5.8.3) @@ -3761,6 +3785,10 @@ snapshots: '@prisma/debug@6.8.2': {} + '@prisma/driver-adapter-utils@6.8.2': + dependencies: + '@prisma/debug': 6.8.2 + '@prisma/engines-version@6.8.0-43.2060c79ba17c6bb9f5823312b6f6b7f4a845738e': {} '@prisma/engines@6.8.2': @@ -5497,6 +5525,8 @@ snapshots: dependencies: json-buffer: 3.0.1 + ky@1.7.5: {} + language-subtag-registry@0.3.23: {} language-tags@1.0.9: diff --git a/prisma/schema.prisma b/prisma/schema.prisma index 8f56443..664855d 100644 --- a/prisma/schema.prisma +++ b/prisma/schema.prisma @@ -1,11 +1,12 @@ // Database schema, one company = one org, linked to users and CSV config generator client { provider = "prisma-client-js" + previewFeatures = ["driverAdapters"] } datasource db { provider = "sqlite" - url = "file:./dev.db" + url = env("DATABASE_URL") } model Company { diff --git a/scripts/d1-manager.js b/scripts/d1-manager.js new file mode 100644 index 0000000..a2d0d1f --- /dev/null +++ b/scripts/d1-manager.js @@ -0,0 +1,184 @@ +#!/usr/bin/env node +/** + * Comprehensive D1 Database Management Script + * + * Usage Examples: + * node scripts/d1-manager.js tables + * node scripts/d1-manager.js schema Company + * node scripts/d1-manager.js count User + * node scripts/d1-manager.js query "SELECT * FROM User LIMIT 5" + * node scripts/d1-manager.js backup + * node scripts/d1-manager.js --remote query "SELECT COUNT(*) FROM Session" + */ + +import { execSync } from 'child_process'; +import { writeFileSync, mkdirSync } from 'fs'; +import { join } from 'path'; + +const DB_NAME = 'd1-notso-livedash'; +const args = process.argv.slice(2); + +// Parse flags +const isRemote = args.includes('--remote'); +const filteredArgs = args.filter(arg => !arg.startsWith('--')); + +if (filteredArgs.length === 0) { + showHelp(); + process.exit(1); +} + +const command = filteredArgs[ 0 ]; +const params = filteredArgs.slice(1); + +function showHelp() { + console.log(` +šŸ—„ļø D1 Database Manager for ${DB_NAME} + +Usage: node scripts/d1-manager.js [--remote] [params...] + +Commands: + info Show database information + tables List all tables + schema Show table schema + count
Count rows in table + query "" Execute custom SQL query + backup [filename] Export database to SQL file + backup-schema Export just the schema + recent-logs Show recent query activity + +Flags: + --remote Execute against remote D1 (production) + +Examples: + node scripts/d1-manager.js tables + node scripts/d1-manager.js schema User + node scripts/d1-manager.js count Company + node scripts/d1-manager.js query "SELECT * FROM User WHERE role = 'admin'" + node scripts/d1-manager.js backup + node scripts/d1-manager.js --remote info +`); +} + +function execute(sql, silent = false) { + const remoteFlag = isRemote ? '--remote' : ''; + const cmd = `npx wrangler d1 execute ${DB_NAME} ${remoteFlag} --command "${sql}"`; + + if (!silent) { + console.log(`šŸ” Executing${isRemote ? ' (remote)' : ' (local)'}: ${sql}\\n`); + } + + try { + return execSync(cmd, { encoding: 'utf8' }); + } catch (error) { + console.error('āŒ Query failed:', error.message); + process.exit(1); + } +} + +function wranglerCommand(subcommand, silent = false) { + const remoteFlag = isRemote ? '--remote' : ''; + const cmd = `npx wrangler d1 ${subcommand} ${DB_NAME} ${remoteFlag}`; + + if (!silent) { + console.log(`šŸ“Š Running: ${cmd}\\n`); + } + + try { + return execSync(cmd, { stdio: 'inherit' }); + } catch (error) { + console.error('āŒ Command failed:', error.message); + process.exit(1); + } +} + +switch (command) { + case 'info': + wranglerCommand('info'); + break; + + case 'tables': + console.log('šŸ“‹ Listing all tables:\\n'); + execute("SELECT name, type FROM sqlite_master WHERE type IN ('table', 'view') AND name NOT LIKE 'sqlite_%' ORDER BY name;"); + break; + + case 'schema': + if (!params[ 0 ]) { + console.error('āŒ Please specify a table name'); + console.log('Usage: node scripts/d1-manager.js schema '); + process.exit(1); + } + console.log(`šŸ—ļø Schema for table '${params[ 0 ]}':\\n`); + execute(`PRAGMA table_info(${params[ 0 ]});`); + break; + + case 'count': + if (!params[ 0 ]) { + console.error('āŒ Please specify a table name'); + console.log('Usage: node scripts/d1-manager.js count '); + process.exit(1); + } + console.log(`šŸ”¢ Row count for table '${params[ 0 ]}':\\n`); + execute(`SELECT COUNT(*) as row_count FROM ${params[ 0 ]};`); + break; + + case 'query': + if (!params[ 0 ]) { + console.error('āŒ Please specify a SQL query'); + console.log('Usage: node scripts/d1-manager.js query "SELECT * FROM table"'); + process.exit(1); + } + execute(params[ 0 ]); + break; + + case 'backup': + const timestamp = new Date().toISOString().replace(/[:.]/g, '-').slice(0, 19); + const filename = params[ 0 ] || `backup_${timestamp}.sql`; + + try { + mkdirSync('backups', { recursive: true }); + } catch (e) { + // Directory might already exist + } + + const backupPath = join('backups', filename); + console.log(`šŸ’¾ Creating backup: ${backupPath}\\n`); + wranglerCommand(`export --output ${backupPath}`); + console.log(`\\nāœ… Backup created successfully: ${backupPath}`); + break; + + case 'backup-schema': + try { + mkdirSync('backups', { recursive: true }); + } catch (e) { + // Directory might already exist + } + + console.log('šŸ“œ Exporting schema only...\\n'); + wranglerCommand('export --no-data --output backups/schema.sql'); + console.log('\\nāœ… Schema exported to backups/schema.sql'); + break; + + case 'recent-logs': + console.log('šŸ“Š Recent database activity:\\n'); + try { + wranglerCommand('insights'); + } catch (error) { + console.log('ā„¹ļø Insights not available for this database'); + } + break; + + case 
'all-tables-info': + console.log('šŸ“Š Information about all tables:\\n'); + const tables = [ 'Company', 'User', 'Session' ]; + for (const table of tables) { + console.log(`\\nšŸ·ļø Table: ${table}`); + console.log('─'.repeat(50)); + execute(`SELECT COUNT(*) as row_count FROM ${table};`); + } + break; + + default: + console.error(`āŒ Unknown command: ${command}`); + showHelp(); + process.exit(1); +} diff --git a/scripts/d1-query.js b/scripts/d1-query.js new file mode 100644 index 0000000..5b1c0d6 --- /dev/null +++ b/scripts/d1-query.js @@ -0,0 +1,36 @@ +#!/usr/bin/env node +/** + * Simple D1 query helper script + * Usage: node scripts/d1-query.js "SELECT * FROM User LIMIT 5" + * Usage: node scripts/d1-query.js --remote "SELECT COUNT(*) FROM Company" + */ + +import { execSync } from 'child_process'; +const args = process.argv.slice(2); + +if (args.length === 0) { + console.log('Usage: node scripts/d1-query.js [--remote] "SQL_QUERY"'); + console.log('Examples:'); + console.log(' node scripts/d1-query.js "SELECT * FROM User LIMIT 5"'); + console.log(' node scripts/d1-query.js --remote "SELECT COUNT(*) FROM Company"'); + process.exit(1); +} + +const isRemote = args.includes('--remote'); +const query = args[ args.length - 1 ]; + +if (!query || query.startsWith('--')) { + console.error('Error: Please provide a SQL query'); + process.exit(1); +} + +const remoteFlag = isRemote ? '--remote' : ''; +const command = `npx wrangler d1 execute d1-notso-livedash ${remoteFlag} --command "${query}"`; + +try { + console.log(`šŸ” Executing${isRemote ? ' (remote)' : ' (local)'}: ${query}\n`); + execSync(command, { stdio: 'inherit' }); +} catch (error) { + console.error('Query failed:', error.message); + process.exit(1); +} diff --git a/scripts/d1.js b/scripts/d1.js new file mode 100644 index 0000000..fe65f90 --- /dev/null +++ b/scripts/d1.js @@ -0,0 +1,89 @@ +#!/usr/bin/env node +/** + * Simple D1 Database CLI + * Usage: node scripts/d1.js [args...] + */ + +import { execSync } from 'child_process'; + +const DB_NAME = 'd1-notso-livedash'; +const args = process.argv.slice(2); + +if (args.length === 0) { + console.log(` +šŸ—„ļø Simple D1 CLI for ${DB_NAME} + +Usage: node scripts/d1.js [args...] + +Commands: + list List databases + info Show database info + tables List all tables + schema
Show table schema + query "" Execute SQL query + export [file] Export database + +Add --remote flag for production database + +Examples: + node scripts/d1.js tables + node scripts/d1.js schema User + node scripts/d1.js query "SELECT COUNT(*) FROM Company" + node scripts/d1.js --remote info +`); + process.exit(0); +} + +const isRemote = args.includes('--remote'); +const filteredArgs = args.filter(arg => !arg.startsWith('--')); +const [ command, ...params ] = filteredArgs; +const remoteFlag = isRemote ? '--remote' : ''; + +function run(cmd) { + try { + console.log(`šŸ’« ${cmd}`); + execSync(cmd, { stdio: 'inherit' }); + } catch (error) { + console.error('āŒ Command failed'); + process.exit(1); + } +} + +switch (command) { + case 'list': + run('npx wrangler d1 list'); + break; + + case 'info': + run(`npx wrangler d1 info ${DB_NAME} ${remoteFlag}`); + break; + + case 'tables': + run(`npx wrangler d1 execute ${DB_NAME} ${remoteFlag} --command "SELECT name FROM sqlite_master WHERE type='table' ORDER BY name"`); + break; + + case 'schema': + if (!params[ 0 ]) { + console.error('āŒ Please specify table name'); + process.exit(1); + } + run(`npx wrangler d1 execute ${DB_NAME} ${remoteFlag} --command "PRAGMA table_info(${params[ 0 ]})"`); + break; + + case 'query': + if (!params[ 0 ]) { + console.error('āŒ Please specify SQL query'); + process.exit(1); + } + run(`npx wrangler d1 execute ${DB_NAME} ${remoteFlag} --command "${params[ 0 ]}"`); + break; + + case 'export': + const filename = params[ 0 ] || `backup_${new Date().toISOString().slice(0, 10)}.sql`; + run(`npx wrangler d1 export ${DB_NAME} ${remoteFlag} --output ${filename}`); + break; + + default: + console.error(`āŒ Unknown command: ${command}`); + process.exit(1); +} diff --git a/src/index.ts b/src/index.ts new file mode 100644 index 0000000..714ce1d --- /dev/null +++ b/src/index.ts @@ -0,0 +1,228 @@ +// Cloudflare Worker entry point for LiveDash-Node +// This file handles requests when deployed to Cloudflare Workers + +import { PrismaClient } from '@prisma/client'; +import { PrismaD1 } from '@prisma/adapter-d1'; + +export interface Env { + DB: D1Database; + NEXTAUTH_SECRET?: string; + NEXTAUTH_URL?: string; +} + +export default { + async fetch(request: Request, env: Env, ctx: ExecutionContext): Promise { + try { + // Initialize Prisma with D1 adapter + const adapter = new PrismaD1(env.DB); + const prisma = new PrismaClient({ adapter }); + + const url = new URL(request.url); + + // CORS headers for all responses + const corsHeaders = { + 'Access-Control-Allow-Origin': '*', + 'Access-Control-Allow-Methods': 'GET, POST, PUT, DELETE, OPTIONS', + 'Access-Control-Allow-Headers': 'Content-Type, Authorization', + }; + + // Handle preflight requests + if (request.method === 'OPTIONS') { + return new Response(null, { headers: corsHeaders }); + } + + // Handle API routes + if (url.pathname.startsWith('/api/')) { + + // Simple health check endpoint + if (url.pathname === '/api/health') { + const companyCount = await prisma.company.count(); + const sessionCount = await prisma.session.count(); + + return new Response( + JSON.stringify({ + status: 'healthy', + database: 'connected', + companies: companyCount, + sessions: sessionCount, + timestamp: new Date().toISOString() + }), + { + headers: { + 'Content-Type': 'application/json', + ...corsHeaders + }, + } + ); + } + + // Test metrics endpoint + if (url.pathname === '/api/test-metrics') { + const sessions = await prisma.session.findMany({ + take: 10, + orderBy: { startTime: 'desc' } + }); + + return 
new Response( + JSON.stringify({ + message: 'LiveDash API running on Cloudflare Workers with D1', + recentSessions: sessions.length, + sessions: sessions + }), + { + headers: { + 'Content-Type': 'application/json', + ...corsHeaders + }, + } + ); + } + + // For other API routes, return a placeholder response + return new Response( + JSON.stringify({ + message: 'API endpoint not implemented in worker yet', + path: url.pathname, + method: request.method, + note: 'This endpoint needs to be migrated from Next.js API routes' + }), + { + status: 501, + headers: { + 'Content-Type': 'application/json', + ...corsHeaders + }, + } + ); + } + + // Handle root path - simple test page + if (url.pathname === '/') { + try { + const companies = await prisma.company.findMany(); + const recentSessions = await prisma.session.findMany({ + take: 5, + orderBy: { startTime: 'desc' }, + include: { company: { select: { name: true } } } + }); + + return new Response( + ` + + + + LiveDash-Node on Cloudflare Workers + + + + +
+            <body>
+              <h1>šŸŽ‰ LiveDash-Node Successfully Connected to D1!</h1>
+              <p>āœ“ Database Connected | āœ“ Prisma Client Working | āœ“ D1 Adapter Active</p>
+
+              <h2>šŸ“Š Database Stats</h2>
+              <ul>
+                <li>Companies: ${companies.length}</li>
+                <li>Recent Sessions: ${recentSessions.length}</li>
+              </ul>
+
+              <h2>šŸ”— Test API Endpoints</h2>
+              <ul>
+                <li><a href="/api/health">/api/health</a></li>
+                <li><a href="/api/test-metrics">/api/test-metrics</a></li>
+              </ul>
+
+              <h2>šŸ¢ Companies in Database</h2>
+              <pre>${companies.length > 0 ? JSON.stringify(companies, null, 2) : 'No companies found'}</pre>
+
+              <h2>šŸ“ˆ Recent Sessions</h2>
+              <pre>${recentSessions.length > 0 ? JSON.stringify(recentSessions, null, 2) : 'No sessions found'}</pre>
+            </body>
+            </html>`,
+            {
+              headers: {
+                'Content-Type': 'text/html',
+                ...corsHeaders
+              },
+            }
+          );
+        } catch (dbError) {
+          return new Response(
+            `<!DOCTYPE html>
+            <html>
+              <head>
+                <title>LiveDash-Node - Database Error</title>
+              </head>
+              <body>
+                <h1>āŒ Database Connection Error</h1>
+                <p>Error: ${dbError instanceof Error ? dbError.message : 'Unknown database error'}</p>
+                <p>Check your D1 database configuration and make sure migrations have been applied.</p>
+              </body>
+            </html>`,
+            {
+              status: 500,
+              headers: { 'Content-Type': 'text/html' },
+            }
+          );
+        }
+      }
+
+      // Handle all other routes
+      return new Response('Not Found - This endpoint is not available in the worker deployment', {
+        status: 404,
+        headers: corsHeaders
+      });
+
+    } catch (error) {
+      console.error('Worker error:', error);
+      return new Response(
+        JSON.stringify({
+          error: 'Internal Server Error',
+          message: error instanceof Error ? error.message : 'Unknown error',
+          stack: error instanceof Error ? error.stack : undefined
+        }),
+        {
+          status: 500,
+          headers: {
+            'Content-Type': 'application/json',
+            'Access-Control-Allow-Origin': '*'
+          },
+        }
+      );
+    }
+  },
+};
diff --git a/wrangler.json b/wrangler.json
index 247f90f..c85b7cd 100644
--- a/wrangler.json
+++ b/wrangler.json
@@ -6,7 +6,7 @@
   "$schema": "node_modules/wrangler/config-schema.json",
   "compatibility_date": "2025-04-01",
   "main": "src/index.ts",
-  "name": "livedash-node",
+  "name": "livedash",
   "upload_source_maps": true,
   "d1_databases": [
     {
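
For reference, the `d1_databases` binding that `env.DB` in `src/index.ts` and the `wrangler d1 migrations apply DB` scripts in `package.json` resolve against is expected to look roughly like the minimal sketch below, assembled from the binding name, database name, and database ID documented in `docs/D1_CLI_ACCESS.md` above; the project's actual `wrangler.json` may include additional fields.

```jsonc
{
  "d1_databases": [
    {
      // Values taken from docs/D1_CLI_ACCESS.md above; the real config may differ.
      "binding": "DB",
      "database_name": "d1-notso-livedash",
      "database_id": "d4ee7efe-d37a-48e4-bed7-fdfaa5108131"
    }
  ]
}
```

With this binding in place, the Worker's `env.DB` and the `predeploy`/`seedLocalD1` migration scripts all point at the same D1 database.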