mirror of
https://github.com/kjanat/livedash-node.git
synced 2026-01-16 13:52:16 +01:00
- Introduced a new function `fetchTranscriptContent` to handle fetching transcripts with optional authentication.
- Enhanced error handling and logging for transcript fetching.
- Updated the `parseTranscriptToMessages` function to improve message parsing logic.
- Replaced the old session processing logic with a new approach that utilizes `SessionImport` records.
- Removed obsolete scripts related to manual triggers and whitespace fixing.
- Updated the server initialization to remove direct server handling, transitioning to a more modular approach.
- Improved overall code structure and readability across various scripts.
83 lines
2.6 KiB
TypeScript
83 lines
2.6 KiB
TypeScript
// Simplified CSV fetcher - fetches and parses CSV data without any processing
|
|
// Maps directly to SessionImport table fields
|
|
import fetch from "node-fetch";
|
|
import { parse } from "csv-parse/sync";
|
|
|
|
// Raw CSV data interface matching SessionImport schema.
// Each record corresponds to one positional CSV row produced by
// fetchAndParseCsv; "Raw" fields hold unparsed string cells that are
// interpreted downstream (presumably by the SessionImport processing
// step — confirm against the importer).
interface RawSessionImport {
  // Column 0 — session identifier from the external system ("" when absent).
  externalSessionId: string;
  // Column 1 — session start timestamp, unparsed ("" when absent).
  startTimeRaw: string;
  // Column 2 — session end timestamp, unparsed ("" when absent).
  endTimeRaw: string;
  // Column 3 — client IP address, if present.
  ipAddress: string | null;
  // Column 4 — country code, if present.
  countryCode: string | null;
  // Column 5 — session language, if present.
  language: string | null;
  // Column 6 — number of messages sent, parsed as an integer.
  messagesSent: number | null;
  // Column 7 — sentiment value, unparsed.
  sentimentRaw: string | null;
  // Column 8 — escalation flag, unparsed.
  escalatedRaw: string | null;
  // Column 9 — forwarded-to-HR flag, unparsed.
  forwardedHrRaw: string | null;
  // Column 10 — URL of the full transcript, if present.
  fullTranscriptUrl: string | null;
  // Column 11 — average response time in seconds, parsed as a float.
  avgResponseTimeSeconds: number | null;
  // Column 12 — token count, parsed as an integer.
  tokens: number | null;
  // Column 13 — token cost in EUR, parsed as a float.
  tokensEur: number | null;
  // Column 14 — session category, if present.
  category: string | null;
  // Column 15 — initial user message, if present.
  initialMessage: string | null;
}
|
|
|
|
/**
|
|
* Fetches and parses CSV data from a URL without any processing
|
|
* Maps CSV columns by position to SessionImport fields
|
|
* @param url The CSV URL
|
|
* @param username Optional username for authentication
|
|
* @param password Optional password for authentication
|
|
* @returns Array of raw session import data
|
|
*/
|
|
export async function fetchAndParseCsv(
|
|
url: string,
|
|
username?: string,
|
|
password?: string
|
|
): Promise<RawSessionImport[]> {
|
|
const authHeader =
|
|
username && password
|
|
? "Basic " + Buffer.from(`${username}:${password}`).toString("base64")
|
|
: undefined;
|
|
|
|
const res = await fetch(url, {
|
|
headers: authHeader ? { Authorization: authHeader } : {},
|
|
});
|
|
|
|
if (!res.ok) {
|
|
throw new Error(`Failed to fetch CSV: ${res.status} ${res.statusText}`);
|
|
}
|
|
|
|
const text = await res.text();
|
|
|
|
// Parse CSV without headers, using positional column mapping
|
|
const records: string[][] = parse(text, {
|
|
delimiter: ",",
|
|
from_line: 1, // Start from first line (no headers)
|
|
relax_column_count: true,
|
|
skip_empty_lines: true,
|
|
trim: true,
|
|
});
|
|
|
|
// Map CSV columns by position to SessionImport fields
|
|
return records.map((row) => ({
|
|
externalSessionId: row[0] || "",
|
|
startTimeRaw: row[1] || "",
|
|
endTimeRaw: row[2] || "",
|
|
ipAddress: row[3] || null,
|
|
countryCode: row[4] || null,
|
|
language: row[5] || null,
|
|
messagesSent: row[6] ? parseInt(row[6], 10) || null : null,
|
|
sentimentRaw: row[7] || null,
|
|
escalatedRaw: row[8] || null,
|
|
forwardedHrRaw: row[9] || null,
|
|
fullTranscriptUrl: row[10] || null,
|
|
avgResponseTimeSeconds: row[11] ? parseFloat(row[11]) || null : null,
|
|
tokens: row[12] ? parseInt(row[12], 10) || null : null,
|
|
tokensEur: row[13] ? parseFloat(row[13]) || null : null,
|
|
category: row[14] || null,
|
|
initialMessage: row[15] || null,
|
|
}));
|
|
}
|