mirror of
https://github.com/kjanat/livegraphs-django.git
synced 2026-01-16 09:02:11 +01:00
Implement data integration tasks with Celery, including periodic fetching and manual refresh of chat data; add utility functions for data processing and transcript handling; create views and URLs for manual data refresh; establish Redis and Celery configuration; enhance error handling and logging; introduce scripts for data cleanup and fixing dashboard data; update documentation for Redis and Celery setup and troubleshooting.
This commit is contained in:
@ -31,7 +31,15 @@ def main():
|
||||
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "dashboard_project.settings")
|
||||
|
||||
# For specific commands, insert the command name at the start of argv
|
||||
if cmd_name in ["runserver", "migrate", "makemigrations", "collectstatic", "createsuperuser", "shell", "test"]:
|
||||
if cmd_name in [
|
||||
"runserver",
|
||||
"migrate",
|
||||
"makemigrations",
|
||||
"collectstatic",
|
||||
"createsuperuser",
|
||||
"shell",
|
||||
"test",
|
||||
]:
|
||||
sys.argv.insert(1, cmd_name)
|
||||
|
||||
# Execute the Django management command
|
||||
|
||||
@ -7,6 +7,7 @@ from .forms import CustomUserChangeForm, CustomUserCreationForm
|
||||
from .models import Company, CustomUser
|
||||
|
||||
|
||||
@admin.register(CustomUser)
|
||||
class CustomUserAdmin(UserAdmin):
|
||||
add_form = CustomUserCreationForm
|
||||
form = CustomUserChangeForm
|
||||
@ -63,15 +64,11 @@ class CustomUserAdmin(UserAdmin):
|
||||
obj.save()
|
||||
|
||||
|
||||
@admin.register(Company)
|
||||
class CompanyAdmin(admin.ModelAdmin):
|
||||
list_display = ("name", "created_at", "get_employee_count")
|
||||
search_fields = ("name", "description")
|
||||
|
||||
@admin.display(description="Employees")
|
||||
def get_employee_count(self, obj):
|
||||
return obj.employees.count()
|
||||
|
||||
get_employee_count.short_description = "Employees"
|
||||
|
||||
|
||||
admin.site.register(CustomUser, CustomUserAdmin)
|
||||
admin.site.register(Company, CompanyAdmin)
|
||||
|
||||
151
dashboard_project/accounts/migrations/0001_initial.py
Normal file
151
dashboard_project/accounts/migrations/0001_initial.py
Normal file
@ -0,0 +1,151 @@
|
||||
# Generated by Django 5.2.1 on 2025-05-16 21:18
|
||||
|
||||
import django.contrib.auth.models
|
||||
import django.contrib.auth.validators
|
||||
import django.db.models.deletion
|
||||
import django.utils.timezone
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
initial = True
|
||||
|
||||
dependencies = [
|
||||
("auth", "0012_alter_user_first_name_max_length"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="Company",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.BigAutoField(
|
||||
auto_created=True,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
verbose_name="ID",
|
||||
),
|
||||
),
|
||||
("name", models.CharField(max_length=100)),
|
||||
("description", models.TextField(blank=True)),
|
||||
("created_at", models.DateTimeField(auto_now_add=True)),
|
||||
],
|
||||
options={
|
||||
"verbose_name_plural": "Companies",
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="CustomUser",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.BigAutoField(
|
||||
auto_created=True,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
verbose_name="ID",
|
||||
),
|
||||
),
|
||||
("password", models.CharField(max_length=128, verbose_name="password")),
|
||||
(
|
||||
"last_login",
|
||||
models.DateTimeField(blank=True, null=True, verbose_name="last login"),
|
||||
),
|
||||
(
|
||||
"is_superuser",
|
||||
models.BooleanField(
|
||||
default=False,
|
||||
help_text="Designates that this user has all permissions without explicitly assigning them.",
|
||||
verbose_name="superuser status",
|
||||
),
|
||||
),
|
||||
(
|
||||
"username",
|
||||
models.CharField(
|
||||
error_messages={"unique": "A user with that username already exists."},
|
||||
help_text="Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.",
|
||||
max_length=150,
|
||||
unique=True,
|
||||
validators=[django.contrib.auth.validators.UnicodeUsernameValidator()],
|
||||
verbose_name="username",
|
||||
),
|
||||
),
|
||||
(
|
||||
"first_name",
|
||||
models.CharField(blank=True, max_length=150, verbose_name="first name"),
|
||||
),
|
||||
(
|
||||
"last_name",
|
||||
models.CharField(blank=True, max_length=150, verbose_name="last name"),
|
||||
),
|
||||
(
|
||||
"email",
|
||||
models.EmailField(blank=True, max_length=254, verbose_name="email address"),
|
||||
),
|
||||
(
|
||||
"is_staff",
|
||||
models.BooleanField(
|
||||
default=False,
|
||||
help_text="Designates whether the user can log into this admin site.",
|
||||
verbose_name="staff status",
|
||||
),
|
||||
),
|
||||
(
|
||||
"is_active",
|
||||
models.BooleanField(
|
||||
default=True,
|
||||
help_text="Designates whether this user should be treated as active. "
|
||||
"Unselect this instead of deleting accounts.",
|
||||
verbose_name="active",
|
||||
),
|
||||
),
|
||||
(
|
||||
"date_joined",
|
||||
models.DateTimeField(default=django.utils.timezone.now, verbose_name="date joined"),
|
||||
),
|
||||
("is_company_admin", models.BooleanField(default=False)),
|
||||
(
|
||||
"groups",
|
||||
models.ManyToManyField(
|
||||
blank=True,
|
||||
help_text="The groups this user belongs to. A user will get all permissions "
|
||||
"granted to each of their groups.",
|
||||
related_name="user_set",
|
||||
related_query_name="user",
|
||||
to="auth.group",
|
||||
verbose_name="groups",
|
||||
),
|
||||
),
|
||||
(
|
||||
"user_permissions",
|
||||
models.ManyToManyField(
|
||||
blank=True,
|
||||
help_text="Specific permissions for this user.",
|
||||
related_name="user_set",
|
||||
related_query_name="user",
|
||||
to="auth.permission",
|
||||
verbose_name="user permissions",
|
||||
),
|
||||
),
|
||||
(
|
||||
"company",
|
||||
models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name="employees",
|
||||
to="accounts.company",
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"verbose_name": "user",
|
||||
"verbose_name_plural": "users",
|
||||
"abstract": False,
|
||||
},
|
||||
managers=[
|
||||
("objects", django.contrib.auth.models.UserManager()),
|
||||
],
|
||||
),
|
||||
]
|
||||
@ -5,18 +5,55 @@ from django.contrib import admin
|
||||
from .models import ChatSession, Dashboard, DataSource
|
||||
|
||||
|
||||
@admin.register(DataSource)
|
||||
class DataSourceAdmin(admin.ModelAdmin):
|
||||
list_display = ("name", "company", "uploaded_at", "get_session_count")
|
||||
list_display = (
|
||||
"name",
|
||||
"company",
|
||||
"uploaded_at",
|
||||
"get_external_source",
|
||||
"get_session_count",
|
||||
)
|
||||
list_filter = ("company", "uploaded_at")
|
||||
search_fields = ("name", "description", "company__name")
|
||||
ordering = ("-uploaded_at",)
|
||||
readonly_fields = ("get_external_data_status",)
|
||||
|
||||
fieldsets = (
|
||||
(None, {"fields": ("name", "description", "company")}),
|
||||
(
|
||||
"Data Source",
|
||||
{
|
||||
"fields": ("file", "external_source"),
|
||||
"description": "Either upload a file OR select an external data source. Not both.",
|
||||
},
|
||||
),
|
||||
(
|
||||
"Stats",
|
||||
{
|
||||
"fields": ("get_external_data_status",),
|
||||
},
|
||||
),
|
||||
)
|
||||
|
||||
@admin.display(description="Sessions")
|
||||
def get_session_count(self, obj):
|
||||
return obj.chat_sessions.count()
|
||||
|
||||
get_session_count.short_description = "Sessions"
|
||||
@admin.display(description="External Source")
|
||||
def get_external_source(self, obj):
|
||||
if obj.external_source:
|
||||
return obj.external_source.name
|
||||
return "None"
|
||||
|
||||
@admin.display(description="External Data Status")
|
||||
def get_external_data_status(self, obj):
|
||||
if obj.external_source:
|
||||
return f"Last synced: {obj.external_source.last_synced or 'Never'} | Status: {obj.external_source.get_status()}"
|
||||
return "No external data source linked"
|
||||
|
||||
|
||||
@admin.register(ChatSession)
|
||||
class ChatSessionAdmin(admin.ModelAdmin):
|
||||
list_display = (
|
||||
"session_id",
|
||||
@ -45,21 +82,18 @@ class ChatSessionAdmin(admin.ModelAdmin):
|
||||
)
|
||||
ordering = ("-start_time",)
|
||||
|
||||
@admin.display(
|
||||
description="Company",
|
||||
ordering="data_source__company__name",
|
||||
)
|
||||
def get_company(self, obj):
|
||||
return obj.data_source.company.name
|
||||
|
||||
get_company.short_description = "Company"
|
||||
get_company.admin_order_field = "data_source__company__name"
|
||||
|
||||
|
||||
@admin.register(Dashboard)
|
||||
class DashboardAdmin(admin.ModelAdmin):
|
||||
list_display = ("name", "company", "created_at", "updated_at")
|
||||
list_filter = ("company", "created_at")
|
||||
search_fields = ("name", "description", "company__name")
|
||||
filter_horizontal = ("data_sources",)
|
||||
ordering = ("-updated_at",)
|
||||
|
||||
|
||||
admin.site.register(DataSource, DataSourceAdmin)
|
||||
admin.site.register(ChatSession, ChatSessionAdmin)
|
||||
admin.site.register(Dashboard, DashboardAdmin)
|
||||
|
||||
@ -6,3 +6,7 @@ from django.apps import AppConfig
|
||||
class DashboardConfig(AppConfig):
|
||||
default_auto_field = "django.db.models.BigAutoField"
|
||||
name = "dashboard"
|
||||
|
||||
def ready(self):
|
||||
# Import signals
|
||||
pass
|
||||
|
||||
@ -0,0 +1,173 @@
|
||||
# dashboard/management/commands/create_test_data.py
|
||||
|
||||
import csv
|
||||
import os
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
from dashboard.models import DataSource
|
||||
from data_integration.models import ChatMessage, ChatSession, ExternalDataSource
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.utils.timezone import make_aware
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = "Create test data for external data source and link it to a dashboard data source"
|
||||
|
||||
def add_arguments(self, parser):
|
||||
parser.add_argument(
|
||||
"--company-id",
|
||||
type=int,
|
||||
help="Company ID to associate with the data source",
|
||||
required=True,
|
||||
)
|
||||
parser.add_argument(
|
||||
"--sample-file",
|
||||
type=str,
|
||||
help="Path to sample CSV file",
|
||||
default="examples/sample.csv",
|
||||
)
|
||||
|
||||
def handle(self, *args, **options): # noqa: ARG002
|
||||
company_id = options["company_id"]
|
||||
sample_file = options["sample_file"]
|
||||
|
||||
# Check if sample file exists
|
||||
project_root = os.path.abspath(os.path.join(os.path.dirname(__file__), "../../../.."))
|
||||
sample_path = os.path.join(project_root, sample_file)
|
||||
|
||||
if not os.path.exists(sample_path):
|
||||
self.stdout.write(self.style.ERROR(f"Sample file not found: {sample_path}"))
|
||||
return
|
||||
|
||||
# Create or get external data source
|
||||
ext_source, created = ExternalDataSource.objects.get_or_create(
|
||||
name="Test External Source",
|
||||
defaults={
|
||||
"api_url": "https://example.com/api",
|
||||
"is_active": True,
|
||||
"sync_interval": 3600,
|
||||
"last_synced": make_aware(datetime.now()),
|
||||
},
|
||||
)
|
||||
|
||||
if created:
|
||||
self.stdout.write(
|
||||
self.style.SUCCESS(f"Created external data source: {ext_source.name} (ID: {ext_source.id})")
|
||||
)
|
||||
else:
|
||||
self.stdout.write(f"Using existing external data source: {ext_source.name} (ID: {ext_source.id})")
|
||||
|
||||
# Create or get dashboard data source linked to external source
|
||||
dash_source, created = DataSource.objects.get_or_create(
|
||||
name="Test Dashboard Source",
|
||||
company_id=company_id,
|
||||
external_source=ext_source,
|
||||
defaults={"description": "Test data source linked to external API"},
|
||||
)
|
||||
|
||||
if created:
|
||||
self.stdout.write(
|
||||
self.style.SUCCESS(f"Created dashboard data source: {dash_source.name} (ID: {dash_source.id})")
|
||||
)
|
||||
else:
|
||||
self.stdout.write(f"Using existing dashboard data source: {dash_source.name} (ID: {dash_source.id})")
|
||||
|
||||
# Import test data from CSV
|
||||
session_count = 0
|
||||
message_count = 0
|
||||
|
||||
# First clear any existing sessions
|
||||
existing_count = ChatSession.objects.filter().count()
|
||||
if existing_count > 0:
|
||||
self.stdout.write(f"Clearing {existing_count} existing chat sessions")
|
||||
ChatSession.objects.all().delete()
|
||||
|
||||
# Parse sample CSV
|
||||
with open(sample_path, "r") as f:
|
||||
reader = csv.reader(f)
|
||||
header = next(reader) # Skip header
|
||||
|
||||
for row in reader:
|
||||
# Make sure row has enough elements
|
||||
padded_row = row + [""] * (len(header) - len(row))
|
||||
|
||||
# Create a dict from the row
|
||||
data = dict(zip(header, padded_row, strict=False))
|
||||
|
||||
# Create a chat session
|
||||
try:
|
||||
# Parse dates
|
||||
try:
|
||||
start_time = make_aware(datetime.strptime(data.get("start_time", ""), "%d.%m.%Y %H:%M:%S"))
|
||||
except ValueError:
|
||||
start_time = make_aware(datetime.now() - timedelta(hours=1))
|
||||
|
||||
try:
|
||||
end_time = make_aware(datetime.strptime(data.get("end_time", ""), "%d.%m.%Y %H:%M:%S"))
|
||||
except ValueError:
|
||||
end_time = make_aware(datetime.now())
|
||||
|
||||
# Convert values to appropriate types
|
||||
escalated = data.get("escalated", "").lower() == "true"
|
||||
forwarded_hr = data.get("forwarded_hr", "").lower() == "true"
|
||||
messages_sent = int(data.get("messages_sent", 0) or 0)
|
||||
tokens = int(data.get("tokens", 0) or 0)
|
||||
tokens_eur = float(data.get("tokens_eur", 0) or 0)
|
||||
user_rating = int(data.get("user_rating", 0) or 0) if data.get("user_rating", "") else None
|
||||
|
||||
# Create session
|
||||
session = ChatSession.objects.create(
|
||||
session_id=data.get("session_id", f"test-{session_count}"),
|
||||
start_time=start_time,
|
||||
end_time=end_time,
|
||||
ip_address=data.get("ip_address", "127.0.0.1"),
|
||||
country=data.get("country", ""),
|
||||
language=data.get("language", ""),
|
||||
messages_sent=messages_sent,
|
||||
sentiment=data.get("sentiment", ""),
|
||||
escalated=escalated,
|
||||
forwarded_hr=forwarded_hr,
|
||||
full_transcript_url=data.get("full_transcript", ""),
|
||||
avg_response_time=float(data.get("avg_response_time", 0) or 0),
|
||||
tokens=tokens,
|
||||
tokens_eur=tokens_eur,
|
||||
category=data.get("category", ""),
|
||||
initial_msg=data.get("initial_msg", ""),
|
||||
user_rating=user_rating,
|
||||
)
|
||||
|
||||
session_count += 1
|
||||
|
||||
# Create messages for this session
|
||||
if data.get("initial_msg"):
|
||||
# User message
|
||||
ChatMessage.objects.create(
|
||||
session=session,
|
||||
sender="User",
|
||||
message=data.get("initial_msg", ""),
|
||||
timestamp=start_time,
|
||||
)
|
||||
message_count += 1
|
||||
|
||||
# Assistant response
|
||||
ChatMessage.objects.create(
|
||||
session=session,
|
||||
sender="Assistant",
|
||||
message=f"This is a test response to {data.get('initial_msg', '')}",
|
||||
timestamp=start_time + timedelta(seconds=30),
|
||||
)
|
||||
message_count += 1
|
||||
|
||||
except Exception as e:
|
||||
self.stdout.write(self.style.ERROR(f"Error creating session: {e}"))
|
||||
|
||||
self.stdout.write(self.style.SUCCESS(f"Created {session_count} chat sessions with {message_count} messages"))
|
||||
|
||||
# Run the sync command to copy data to dashboard
|
||||
self.stdout.write("Syncing data to dashboard...")
|
||||
|
||||
from django.core.management import call_command
|
||||
|
||||
call_command("sync_external_data", source_id=ext_source.id)
|
||||
|
||||
self.stdout.write(self.style.SUCCESS("Done! Your dashboard should now show test data."))
|
||||
@ -0,0 +1,128 @@
|
||||
# dashboard/management/commands/sync_external_data.py
|
||||
|
||||
import logging
|
||||
|
||||
from dashboard.models import ChatSession as DashboardChatSession
|
||||
from dashboard.models import DataSource
|
||||
from data_integration.models import ChatSession as ExternalChatSession
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.db import transaction
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = "Synchronize data from external data sources to dashboard data sources"
|
||||
|
||||
def add_arguments(self, parser):
|
||||
parser.add_argument(
|
||||
"--source-id",
|
||||
type=int,
|
||||
help="Specific external data source ID to sync",
|
||||
required=False,
|
||||
)
|
||||
parser.add_argument(
|
||||
"--clear",
|
||||
action="store_true",
|
||||
help="Clear existing dashboard data before sync",
|
||||
)
|
||||
|
||||
def handle(self, *args, **options): # noqa: ARG002
|
||||
source_id = options.get("source_id")
|
||||
clear_existing = options.get("clear", False)
|
||||
|
||||
# Get all datasources that have an external_source
|
||||
if source_id:
|
||||
data_sources = DataSource.objects.filter(external_source_id=source_id)
|
||||
if not data_sources.exists():
|
||||
self.stdout.write(
|
||||
self.style.WARNING(f"No dashboard data sources linked to external source ID {source_id}")
|
||||
)
|
||||
return
|
||||
else:
|
||||
data_sources = DataSource.objects.exclude(external_source=None)
|
||||
if not data_sources.exists():
|
||||
self.stdout.write(self.style.WARNING("No dashboard data sources with external sources found"))
|
||||
return
|
||||
|
||||
total_synced = 0
|
||||
total_errors = 0
|
||||
|
||||
for data_source in data_sources:
|
||||
self.stdout.write(f"Processing dashboard data source: {data_source.name} (ID: {data_source.id})")
|
||||
|
||||
if not data_source.external_source:
|
||||
self.stdout.write(self.style.WARNING(f" - No external source linked to {data_source.name}"))
|
||||
continue
|
||||
|
||||
# Get all external chat sessions for this source
|
||||
external_sessions = ExternalChatSession.objects.all()
|
||||
session_count = external_sessions.count()
|
||||
|
||||
if session_count == 0:
|
||||
self.stdout.write(self.style.WARNING(" - No external sessions found"))
|
||||
continue
|
||||
|
||||
self.stdout.write(f" - Found {session_count} external sessions")
|
||||
|
||||
# Clear existing data if requested
|
||||
if clear_existing:
|
||||
existing_count = DashboardChatSession.objects.filter(data_source=data_source).count()
|
||||
if existing_count > 0:
|
||||
self.stdout.write(f" - Clearing {existing_count} existing dashboard sessions")
|
||||
DashboardChatSession.objects.filter(data_source=data_source).delete()
|
||||
|
||||
# Process each external session
|
||||
synced_count = 0
|
||||
error_count = 0
|
||||
|
||||
for ext_session in external_sessions:
|
||||
try:
|
||||
with transaction.atomic():
|
||||
# Create or update dashboard chat session
|
||||
(
|
||||
dashboard_session,
|
||||
created,
|
||||
) = DashboardChatSession.objects.update_or_create(
|
||||
data_source=data_source,
|
||||
session_id=ext_session.session_id,
|
||||
defaults={
|
||||
"start_time": ext_session.start_time,
|
||||
"end_time": ext_session.end_time,
|
||||
"ip_address": ext_session.ip_address,
|
||||
"country": ext_session.country or "",
|
||||
"language": ext_session.language or "",
|
||||
"messages_sent": ext_session.messages_sent or 0,
|
||||
"sentiment": ext_session.sentiment or "",
|
||||
"escalated": ext_session.escalated or False,
|
||||
"forwarded_hr": ext_session.forwarded_hr or False,
|
||||
"full_transcript": ext_session.full_transcript_url or "",
|
||||
"avg_response_time": ext_session.avg_response_time,
|
||||
"tokens": ext_session.tokens or 0,
|
||||
"tokens_eur": ext_session.tokens_eur,
|
||||
"category": ext_session.category or "",
|
||||
"initial_msg": ext_session.initial_msg or "",
|
||||
"user_rating": (
|
||||
str(ext_session.user_rating) if ext_session.user_rating is not None else ""
|
||||
),
|
||||
},
|
||||
)
|
||||
synced_count += 1
|
||||
action = "Created" if created else "Updated"
|
||||
self.stdout.write(f" - {action} session: {dashboard_session.session_id}")
|
||||
|
||||
except Exception as e:
|
||||
self.stdout.write(self.style.ERROR(f" - Error syncing session {ext_session.session_id}: {str(e)}"))
|
||||
logger.error(
|
||||
f"Error syncing session {ext_session.session_id}: {e}",
|
||||
exc_info=True,
|
||||
)
|
||||
error_count += 1
|
||||
|
||||
self.stdout.write(self.style.SUCCESS(f" - Synced {synced_count} sessions with {error_count} errors"))
|
||||
total_synced += synced_count
|
||||
total_errors += error_count
|
||||
|
||||
self.stdout.write(
|
||||
self.style.SUCCESS(f"Sync complete. Total: {total_synced} sessions synced, {total_errors} errors")
|
||||
)
|
||||
110
dashboard_project/dashboard/migrations/0001_initial.py
Normal file
110
dashboard_project/dashboard/migrations/0001_initial.py
Normal file
@ -0,0 +1,110 @@
|
||||
# Generated by Django 5.2.1 on 2025-05-16 21:25
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
initial = True
|
||||
|
||||
dependencies = [
|
||||
("accounts", "0001_initial"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="DataSource",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.BigAutoField(
|
||||
auto_created=True,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
verbose_name="ID",
|
||||
),
|
||||
),
|
||||
("name", models.CharField(max_length=255)),
|
||||
("description", models.TextField(blank=True)),
|
||||
("file", models.FileField(upload_to="data_sources/")),
|
||||
("uploaded_at", models.DateTimeField(auto_now_add=True)),
|
||||
(
|
||||
"company",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="data_sources",
|
||||
to="accounts.company",
|
||||
),
|
||||
),
|
||||
],
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="Dashboard",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.BigAutoField(
|
||||
auto_created=True,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
verbose_name="ID",
|
||||
),
|
||||
),
|
||||
("name", models.CharField(max_length=255)),
|
||||
("description", models.TextField(blank=True)),
|
||||
("created_at", models.DateTimeField(auto_now_add=True)),
|
||||
("updated_at", models.DateTimeField(auto_now=True)),
|
||||
(
|
||||
"company",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="dashboards",
|
||||
to="accounts.company",
|
||||
),
|
||||
),
|
||||
(
|
||||
"data_sources",
|
||||
models.ManyToManyField(related_name="dashboards", to="dashboard.datasource"),
|
||||
),
|
||||
],
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="ChatSession",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.BigAutoField(
|
||||
auto_created=True,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
verbose_name="ID",
|
||||
),
|
||||
),
|
||||
("session_id", models.CharField(max_length=255)),
|
||||
("start_time", models.DateTimeField(blank=True, null=True)),
|
||||
("end_time", models.DateTimeField(blank=True, null=True)),
|
||||
("ip_address", models.GenericIPAddressField(blank=True, null=True)),
|
||||
("country", models.CharField(blank=True, max_length=100)),
|
||||
("language", models.CharField(blank=True, max_length=50)),
|
||||
("messages_sent", models.IntegerField(default=0)),
|
||||
("sentiment", models.CharField(blank=True, max_length=50)),
|
||||
("escalated", models.BooleanField(default=False)),
|
||||
("forwarded_hr", models.BooleanField(default=False)),
|
||||
("full_transcript", models.TextField(blank=True)),
|
||||
("avg_response_time", models.FloatField(blank=True, null=True)),
|
||||
("tokens", models.IntegerField(default=0)),
|
||||
("tokens_eur", models.FloatField(blank=True, null=True)),
|
||||
("category", models.CharField(blank=True, max_length=100)),
|
||||
("initial_msg", models.TextField(blank=True)),
|
||||
("user_rating", models.CharField(blank=True, max_length=50)),
|
||||
(
|
||||
"data_source",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="chat_sessions",
|
||||
to="dashboard.datasource",
|
||||
),
|
||||
),
|
||||
],
|
||||
),
|
||||
]
|
||||
@ -0,0 +1,35 @@
|
||||
# Generated by Django 5.2.1 on 2025-05-17 23:10
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("dashboard", "0001_initial"),
|
||||
("data_integration", "0002_externaldatasource_error_count_and_more"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="datasource",
|
||||
name="external_source",
|
||||
field=models.ForeignKey(
|
||||
blank=True,
|
||||
help_text="Link to an external data source",
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
to="data_integration.externaldatasource",
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="datasource",
|
||||
name="file",
|
||||
field=models.FileField(
|
||||
blank=True,
|
||||
help_text="Upload a CSV file or leave empty if using an external data source",
|
||||
null=True,
|
||||
upload_to="data_sources/",
|
||||
),
|
||||
),
|
||||
]
|
||||
@ -0,0 +1,16 @@
|
||||
# Generated by Django 5.2.1 on 2025-05-18 00:09
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("dashboard", "0002_datasource_external_source_alter_datasource_file"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterUniqueTogether(
|
||||
name="chatsession",
|
||||
unique_together={("session_id", "data_source")},
|
||||
),
|
||||
]
|
||||
@ -5,11 +5,23 @@ from django.db import models
|
||||
|
||||
|
||||
class DataSource(models.Model):
|
||||
"""Model for uploaded data sources (CSV files)"""
|
||||
"""Model for data sources (CSV files or external API data)"""
|
||||
|
||||
name = models.CharField(max_length=255)
|
||||
description = models.TextField(blank=True)
|
||||
file = models.FileField(upload_to="data_sources/")
|
||||
file = models.FileField(
|
||||
upload_to="data_sources/",
|
||||
blank=True,
|
||||
null=True,
|
||||
help_text="Upload a CSV file or leave empty if using an external data source",
|
||||
)
|
||||
external_source = models.ForeignKey(
|
||||
"data_integration.ExternalDataSource",
|
||||
on_delete=models.SET_NULL,
|
||||
blank=True,
|
||||
null=True,
|
||||
help_text="Link to an external data source",
|
||||
)
|
||||
uploaded_at = models.DateTimeField(auto_now_add=True)
|
||||
company = models.ForeignKey(Company, on_delete=models.CASCADE, related_name="data_sources")
|
||||
|
||||
@ -42,6 +54,9 @@ class ChatSession(models.Model):
|
||||
def __str__(self):
|
||||
return f"Session {self.session_id}"
|
||||
|
||||
class Meta:
|
||||
unique_together = ("session_id", "data_source")
|
||||
|
||||
|
||||
class Dashboard(models.Model):
|
||||
"""Model for custom dashboards that can be created by users"""
|
||||
|
||||
79
dashboard_project/dashboard/signals.py
Normal file
79
dashboard_project/dashboard/signals.py
Normal file
@ -0,0 +1,79 @@
|
||||
# dashboard/signals.py
|
||||
|
||||
import logging
|
||||
|
||||
from dashboard.models import ChatSession as DashboardChatSession
|
||||
from dashboard.models import DataSource
|
||||
from data_integration.models import ChatSession as ExternalChatSession
|
||||
from django.db.models.signals import post_save
|
||||
from django.dispatch import receiver
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@receiver(post_save, sender=ExternalChatSession)
|
||||
def sync_external_session_to_dashboard(
|
||||
sender, # noqa: ARG001
|
||||
instance,
|
||||
created,
|
||||
**kwargs, # noqa: ARG001
|
||||
):
|
||||
"""
|
||||
Signal handler to sync external chat sessions to dashboard chat sessions
|
||||
whenever an external session is created or updated.
|
||||
|
||||
Args:
|
||||
sender: The model class that sent the signal (unused but required by Django's signal interface)
|
||||
instance: The ExternalChatSession instance that was saved
|
||||
created: Boolean indicating if this is a new instance
|
||||
**kwargs: Additional keyword arguments (unused but required by Django's signal interface)
|
||||
"""
|
||||
# Find all dashboard data sources that are linked to this external data source
|
||||
# Since ExternalChatSession doesn't have a direct link to ExternalDataSource,
|
||||
# we need to sync to all dashboard data sources with external sources
|
||||
data_sources = DataSource.objects.exclude(external_source=None)
|
||||
|
||||
if not data_sources.exists():
|
||||
logger.warning(f"No dashboard data sources with external sources found for session {instance.session_id}")
|
||||
return
|
||||
|
||||
for data_source in data_sources:
|
||||
try:
|
||||
# Create or update dashboard chat session
|
||||
dashboard_session, created = DashboardChatSession.objects.update_or_create(
|
||||
data_source=data_source,
|
||||
session_id=instance.session_id,
|
||||
defaults={
|
||||
"start_time": instance.start_time,
|
||||
"end_time": instance.end_time,
|
||||
"ip_address": instance.ip_address,
|
||||
"country": instance.country or "",
|
||||
"language": instance.language or "",
|
||||
"messages_sent": instance.messages_sent or 0,
|
||||
"sentiment": instance.sentiment or "",
|
||||
"escalated": instance.escalated or False,
|
||||
"forwarded_hr": instance.forwarded_hr or False,
|
||||
"full_transcript": instance.full_transcript_url or "",
|
||||
"avg_response_time": instance.avg_response_time,
|
||||
"tokens": instance.tokens or 0,
|
||||
"tokens_eur": instance.tokens_eur,
|
||||
"category": instance.category or "",
|
||||
"initial_msg": instance.initial_msg or "",
|
||||
"user_rating": (str(instance.user_rating) if instance.user_rating is not None else ""),
|
||||
},
|
||||
)
|
||||
|
||||
if created:
|
||||
logger.info(
|
||||
f"Created dashboard session: {dashboard_session.session_id} for data source {data_source.name}"
|
||||
)
|
||||
else:
|
||||
logger.info(
|
||||
f"Updated dashboard session: {dashboard_session.session_id} for data source {data_source.name}"
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
f"Error syncing session {instance.session_id} to data source {data_source.name}: {e}",
|
||||
exc_info=True,
|
||||
)
|
||||
@ -42,4 +42,6 @@ urlpatterns = [
|
||||
path("data-view/", views.data_view, name="data_view"),
|
||||
# Export to CSV
|
||||
path("export/csv/", views_export.export_chats_csv, name="export_chats_csv"),
|
||||
# Export to JSON
|
||||
path("export/json/", views_export.export_chats_json, name="export_chats_json"),
|
||||
]
|
||||
|
||||
@ -200,7 +200,12 @@ def chat_session_detail_view(request, session_id):
|
||||
# Check if this is an AJAX navigation request
|
||||
if is_ajax_navigation(request):
|
||||
html_content = render_to_string("dashboard/chat_session_detail.html", context, request=request)
|
||||
return JsonResponse({"html": html_content, "title": f"Chat Session {session_id} | Chat Analytics"})
|
||||
return JsonResponse(
|
||||
{
|
||||
"html": html_content,
|
||||
"title": f"Chat Session {session_id} | Chat Analytics",
|
||||
}
|
||||
)
|
||||
|
||||
return render(request, "dashboard/chat_session_detail.html", context)
|
||||
|
||||
@ -277,7 +282,12 @@ def edit_dashboard_view(request, dashboard_id):
|
||||
# Check if this is an AJAX navigation request
|
||||
if is_ajax_navigation(request):
|
||||
html_content = render_to_string("dashboard/dashboard_form.html", context, request=request)
|
||||
return JsonResponse({"html": html_content, "title": f"Edit Dashboard: {dashboard.name} | Chat Analytics"})
|
||||
return JsonResponse(
|
||||
{
|
||||
"html": html_content,
|
||||
"title": f"Edit Dashboard: {dashboard.name} | Chat Analytics",
|
||||
}
|
||||
)
|
||||
|
||||
return render(request, "dashboard/dashboard_form.html", context)
|
||||
|
||||
|
||||
@ -1,6 +1,7 @@
|
||||
# dashboard/views_export.py
|
||||
|
||||
import csv
|
||||
import json
|
||||
from datetime import timedelta
|
||||
|
||||
from django.contrib.auth.decorators import login_required
|
||||
@ -135,3 +136,115 @@ def export_chats_csv(request):
|
||||
)
|
||||
|
||||
return response
|
||||
|
||||
|
||||
@login_required
def export_chats_json(request):
    """Export chat sessions to JSON with filtering options.

    Supports the same filters as the CSV export: data source, dashboard,
    named views (recent/positive/negative/escalated), date range, country,
    sentiment and escalation status. Returns the sessions as a JSON file
    attachment wrapped in an envelope carrying the company name and the
    export timestamp.
    """
    user = request.user
    company = user.company

    if not company:
        return HttpResponse("You are not associated with any company.", status=403)

    # Get and apply filters
    data_source_id = request.GET.get("data_source_id")
    dashboard_id = request.GET.get("dashboard_id")
    view = request.GET.get("view", "all")
    start_date = request.GET.get("start_date")
    end_date = request.GET.get("end_date")
    country = request.GET.get("country")
    sentiment = request.GET.get("sentiment")
    escalated = request.GET.get("escalated")

    # Base queryset: only sessions belonging to the user's company
    sessions = ChatSession.objects.filter(data_source__company=company)

    # Apply data source filter if selected (404 unless it belongs to the company)
    data_source = None
    dashboard = None
    if data_source_id:
        data_source = get_object_or_404(DataSource, id=data_source_id, company=company)
        sessions = sessions.filter(data_source=data_source)

    # Apply dashboard filter if selected
    if dashboard_id:
        dashboard = get_object_or_404(Dashboard, id=dashboard_id, company=company)
        data_sources = dashboard.data_sources.all()
        sessions = sessions.filter(data_source__in=data_sources)

    # Apply view filter
    if view == "recent":
        seven_days_ago = timezone.now() - timedelta(days=7)
        sessions = sessions.filter(start_time__gte=seven_days_ago)
    elif view == "positive":
        sessions = sessions.filter(Q(sentiment__icontains="positive"))
    elif view == "negative":
        sessions = sessions.filter(Q(sentiment__icontains="negative"))
    elif view == "escalated":
        sessions = sessions.filter(escalated=True)

    # Apply additional filters
    if start_date:
        sessions = sessions.filter(start_time__date__gte=start_date)
    if end_date:
        sessions = sessions.filter(start_time__date__lte=end_date)
    if country:
        sessions = sessions.filter(country__icontains=country)
    if sentiment:
        sessions = sessions.filter(sentiment__icontains=sentiment)
    if escalated:
        escalated_val = escalated.lower() == "true"
        sessions = sessions.filter(escalated=escalated_val)

    # Order by most recent first
    sessions = sessions.order_by("-start_time")

    # Build the download filename from the dashboard or data source name.
    # Reuse the objects fetched above instead of issuing duplicate queries.
    filename = "chat_sessions"
    if dashboard is not None:
        filename = f"{dashboard.name.replace(' ', '_').lower()}_chat_sessions"
    elif data_source is not None:
        filename = f"{data_source.name.replace(' ', '_').lower()}_chat_sessions"

    # Prepare the data for JSON export using a list comprehension.
    # NOTE(review): initial migration defines `full_transcript_url`; confirm
    # the ChatSession model now exposes `full_transcript` as used here.
    data = [
        {
            "session_id": session.session_id,
            "start_time": (session.start_time.isoformat() if session.start_time else None),
            "end_time": session.end_time.isoformat() if session.end_time else None,
            "ip_address": session.ip_address,
            "country": session.country,
            "language": session.language,
            "messages_sent": session.messages_sent,
            "sentiment": session.sentiment,
            "escalated": session.escalated,
            "forwarded_hr": session.forwarded_hr,
            "full_transcript": session.full_transcript,
            "avg_response_time": session.avg_response_time,
            "tokens": session.tokens,
            "tokens_eur": session.tokens_eur,
            "category": session.category,
            "initial_msg": session.initial_msg,
            "user_rating": session.user_rating,
        }
        for session in sessions
    ]

    # Create the HttpResponse with JSON header.
    # BUG FIX: the computed `filename` was previously ignored — the
    # Content-Disposition header was an f-string with no placeholder, so
    # every download got the same hard-coded name.
    response = HttpResponse(content_type="application/json")
    response["Content-Disposition"] = f'attachment; filename="{filename}.json"'

    # Add company and timestamp to the exported JSON
    current_time = timezone.now().isoformat()
    export_data = {
        "company": company.name,
        "export_date": current_time,
        "export_type": "chat_sessions",
        "data": data,
    }

    # Write JSON data to the response (HttpResponse is file-like)
    json.dump(export_data, response, indent=2)

    return response
|
||||
|
||||
@ -1 +1,3 @@
|
||||
# Load the Celery app when Django starts so that @shared_task decorators
# bind to it (standard Celery + Django integration pattern).
from .celery import app as celery_app

__all__ = ("celery_app",)
|
||||
|
||||
22
dashboard_project/dashboard_project/celery.py
Normal file
22
dashboard_project/dashboard_project/celery.py
Normal file
@ -0,0 +1,22 @@
|
||||
import os

from celery import Celery

# Set the default Django settings module for the 'celery' program so the
# worker can boot Django without an explicit environment variable.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "dashboard_project.settings")

app = Celery("dashboard_project")

# Using a string here means the worker doesn't have to serialize
# the configuration object to child processes.
# - namespace='CELERY' means all celery-related configuration keys
#   should have a `CELERY_` prefix (e.g. CELERY_BROKER_URL in settings.py).
app.config_from_object("django.conf:settings", namespace="CELERY")

# Load task modules (tasks.py) from all registered Django app configs.
app.autodiscover_tasks()


@app.task(bind=True)
def debug_task(self):
    # Smoke-test task: prints its own request context so a running worker
    # can be verified from the shell via debug_task.delay().
    print(f"Request: {self.request!r}")
|
||||
@ -1,16 +1,27 @@
|
||||
# dashboard_project/settings.py
|
||||
|
||||
import logging
|
||||
import os
|
||||
from pathlib import Path
|
||||
|
||||
from django.core.management.utils import get_random_secret_key
|
||||
|
||||
# Load environment variables from .env file if present
|
||||
try:
|
||||
from dotenv import load_dotenv
|
||||
|
||||
load_dotenv()
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
# Build paths inside the project like this: BASE_DIR / 'subdir'.
|
||||
BASE_DIR = Path(__file__).resolve().parent.parent
|
||||
|
||||
# SECURITY WARNING: keep the secret key used in production secret!
|
||||
SECRET_KEY = "django-insecure-your-secret-key-here" # nosec: B105
|
||||
SECRET_KEY = os.environ.get("DJANGO_SECRET_KEY", get_random_secret_key())
|
||||
|
||||
# SECURITY WARNING: don't run with debug turned on in production!
|
||||
DEBUG = True
|
||||
DEBUG = os.environ.get("DJANGO_DEBUG", "True") == "True"
|
||||
|
||||
ALLOWED_HOSTS = []
|
||||
|
||||
@ -29,9 +40,11 @@ INSTALLED_APPS = [
|
||||
"allauth.socialaccount",
|
||||
"crispy_forms",
|
||||
"crispy_bootstrap5",
|
||||
"django_celery_beat",
|
||||
# Custom apps
|
||||
"dashboard.apps.DashboardConfig",
|
||||
"accounts.apps.AccountsConfig",
|
||||
"data_integration",
|
||||
]
|
||||
|
||||
MIDDLEWARE = [
|
||||
@ -91,7 +104,7 @@ AUTH_PASSWORD_VALIDATORS = [
|
||||
]
|
||||
|
||||
# Internationalization
|
||||
LANGUAGE_CODE = "nl"
|
||||
LANGUAGE_CODE = "en-US"
|
||||
TIME_ZONE = "Europe/Amsterdam"
|
||||
USE_I18N = True
|
||||
USE_TZ = True
|
||||
@ -102,7 +115,14 @@ STATICFILES_DIRS = [
|
||||
os.path.join(BASE_DIR, "static"),
|
||||
]
|
||||
STATIC_ROOT = os.path.join(BASE_DIR, "staticfiles")
|
||||
STATICFILES_STORAGE = "whitenoise.storage.CompressedManifestStaticFilesStorage"
|
||||
STORAGES = {
|
||||
"default": {
|
||||
"BACKEND": "django.core.files.storage.FileSystemStorage",
|
||||
},
|
||||
"staticfiles": {
|
||||
"BACKEND": "whitenoise.storage.CompressedManifestStaticFilesStorage",
|
||||
},
|
||||
}
|
||||
|
||||
# Media files
|
||||
MEDIA_URL = "/media/"
|
||||
@ -128,3 +148,50 @@ AUTHENTICATION_BACKENDS = [
|
||||
]
|
||||
SITE_ID = 1
|
||||
ACCOUNT_EMAIL_VERIFICATION = "none"
|
||||
|
||||
# Celery Configuration
# Prefer Redis as broker/result backend; fall back to SQLite (SQLAlchemy
# transport) when Redis is missing or unreachable so development works
# without a Redis server.
logger = logging.getLogger(__name__)

# Defaults used when falling back to the SQLite transport.
_SQLITE_BROKER = "sqla+sqlite:///celery.sqlite"
_SQLITE_BACKEND = "db+sqlite:///results.sqlite"

try:
    import redis

    redis_client = redis.Redis(
        host=os.environ.get("REDIS_HOST", "localhost"),
        port=int(os.environ.get("REDIS_PORT", 6379)),
        db=int(os.environ.get("REDIS_DB", 0)),
        socket_connect_timeout=2,  # fail fast: 2 seconds timeout
    )
    redis_client.ping()
    # Redis is available, use it
    CELERY_BROKER_URL = os.environ.get("CELERY_BROKER_URL", "redis://localhost:6379/0")
    CELERY_RESULT_BACKEND = os.environ.get("CELERY_RESULT_BACKEND", "redis://localhost:6379/0")
    logger.info("Using Redis for Celery broker and result backend")
except ImportError:
    # BUG FIX: ImportError was previously caught in the same clause that
    # referenced redis.exceptions.* — when the redis package is missing the
    # name `redis` is unbound, so evaluating that except tuple raised
    # NameError instead of taking the fallback path.
    CELERY_BROKER_URL = os.environ.get("CELERY_BROKER_URL", _SQLITE_BROKER)
    CELERY_RESULT_BACKEND = os.environ.get("CELERY_RESULT_BACKEND", _SQLITE_BACKEND)
    logger.warning("redis package not installed. Using SQLite for Celery.")
except (redis.exceptions.ConnectionError, redis.exceptions.TimeoutError) as e:
    # Redis is installed but not reachable; use the SQLite fallback.
    CELERY_BROKER_URL = os.environ.get("CELERY_BROKER_URL", _SQLITE_BROKER)
    CELERY_RESULT_BACKEND = os.environ.get("CELERY_RESULT_BACKEND", _SQLITE_BACKEND)
    logger.warning(f"Redis connection failed: {str(e)}. Using SQLite for Celery.")

CELERY_ACCEPT_CONTENT = ["json"]
CELERY_TASK_SERIALIZER = "json"
CELERY_RESULT_SERIALIZER = "json"
CELERY_TIMEZONE = TIME_ZONE
CELERY_BEAT_SCHEDULER = "django_celery_beat.schedulers:DatabaseScheduler"

# Get schedule from environment variables or use defaults
CHAT_DATA_FETCH_INTERVAL = int(os.environ.get("CHAT_DATA_FETCH_INTERVAL", 3600))  # Default: 1 hour

CELERY_BEAT_SCHEDULE = {
    "fetch_chat_data_periodic": {
        "task": "data_integration.tasks.periodic_fetch_chat_data",
        "schedule": CHAT_DATA_FETCH_INTERVAL,
        "options": {
            # Expire shortly before the next run so stale tasks don't pile up.
            "expires": CHAT_DATA_FETCH_INTERVAL - 10,  # 10 seconds before next run
        },
    },
}
|
||||
|
||||
@ -1,5 +1,6 @@
|
||||
# dashboard_project/urls.py
|
||||
|
||||
from data_integration.views import refresh_specific_datasource
|
||||
from django.conf import settings
|
||||
from django.conf.urls.static import static
|
||||
from django.contrib import admin
|
||||
@ -7,10 +8,23 @@ from django.urls import include, path
|
||||
from django.views.generic import RedirectView
|
||||
|
||||
urlpatterns = [
    # Additional admin URLs should come BEFORE admin.site.urls, otherwise
    # the admin catch-all pattern would shadow them.
    path(
        "admin/data_integration/externaldatasource/refresh/<int:source_id>/",
        refresh_specific_datasource,
        name="admin_refresh_datasource",
    ),
    # Alternative URL pattern for direct access
    path(
        "admin/data_integration/refresh/<int:source_id>/",
        refresh_specific_datasource,
        name="admin_refresh_datasource_alt",
    ),
    path("admin/", admin.site.urls),
    path("accounts/", include("accounts.urls")),
    path("dashboard/", include("dashboard.urls")),
    # Site root redirects to the dashboard app.
    path("", RedirectView.as_view(url="dashboard/", permanent=False)),
    path("data/", include("data_integration.urls", namespace="data_integration")),
]
|
||||
|
||||
if settings.DEBUG:
|
||||
|
||||
0
dashboard_project/data_integration/__init__.py
Normal file
0
dashboard_project/data_integration/__init__.py
Normal file
125
dashboard_project/data_integration/admin.py
Normal file
125
dashboard_project/data_integration/admin.py
Normal file
@ -0,0 +1,125 @@
|
||||
from django.contrib import admin
|
||||
from django.utils.html import format_html
|
||||
|
||||
from .models import ChatMessage, ChatSession, ExternalDataSource
|
||||
from .tasks import refresh_specific_source
|
||||
|
||||
|
||||
@admin.register(ExternalDataSource)
class ExternalDataSourceAdmin(admin.ModelAdmin):
    """Admin for external data sources with a colored status badge and a
    manual "Refresh Now" action that kicks off the Celery refresh task."""

    list_display = (
        "name",
        "api_url",
        "is_active",
        "last_synced",
        "status_badge",
        "sync_interval",
        "refresh_action",
    )
    list_filter = ("is_active",)
    search_fields = ("name", "api_url")
    readonly_fields = ("last_synced", "error_count", "last_error")
    fieldsets = (
        (None, {"fields": ("name", "api_url", "is_active")}),
        (
            "Authentication",
            {
                "fields": ("auth_username", "auth_password"),
                "description": "Credentials can also be provided via environment variables.",
            },
        ),
        ("Sync Settings", {"fields": ("sync_interval", "timeout")}),
        ("Status", {"fields": ("last_synced", "error_count", "last_error")}),
    )

    @admin.display(description="Status")
    def status_badge(self, obj):
        """Display a colored status badge"""
        status = obj.get_status()
        # Map the status text to a badge color; anything unrecognized
        # (e.g. a transient state) is rendered in orange.
        if status == "Active":
            color = "green"
        elif status == "Inactive":
            color = "gray"
        elif "Error" in status:
            color = "red"
        else:
            color = "orange"
        return format_html(
            '<span style="color: white; background-color: {}; padding: 3px 8px; border-radius: 10px;">{}</span>',
            color,
            status,
        )

    @admin.display(description="Actions")
    def refresh_action(self, obj):
        """Button to manually refresh a data source"""
        if not obj.is_active:
            return "Inactive"
        refresh_url = f"/admin/data_integration/externaldatasource/refresh/{obj.id}/"
        return format_html('<a class="button" href="{}">Refresh Now</a>', refresh_url)

    def refresh_source(self, request, source_id):
        """Run a task to refresh the source data"""
        task = refresh_specific_source.delay(source_id)
        self.message_user(request, f"Data refresh task started (Task ID: {task.id})")

    def get_urls(self):
        from django.urls import path

        # Custom URLs must precede the default admin URLs so they match first.
        custom_urls = [
            path(
                "refresh/<int:source_id>/",
                self.admin_site.admin_view(self.refresh_source),
                name="data_integration_externaldatasource_refresh",
            ),
        ]
        return custom_urls + super().get_urls()
|
||||
|
||||
|
||||
@admin.register(ChatSession)
class ChatSessionAdmin(admin.ModelAdmin):
    """Admin list view over synced chat sessions.

    session_id is read-only since it uniquely identifies the session
    (unique CharField) and should not be edited after import.
    """

    list_display = (
        "session_id",
        "start_time",
        "end_time",
        "country",
        "language",
        "messages_sent",
        "sentiment",
    )
    list_filter = ("country", "language", "sentiment")
    search_fields = ("session_id", "country", "ip_address")
    readonly_fields = ("session_id",)
|
||||
|
||||
|
||||
@admin.register(ChatMessage)
class ChatMessageAdmin(admin.ModelAdmin):
    """Admin for individual chat messages: truncated text preview in the
    list, plus a sanitized-HTML rendering on the detail page."""

    list_display = ("session", "sender", "timestamp", "message_preview")
    list_filter = ("sender", "timestamp")
    search_fields = ("message", "session__session_id")
    readonly_fields = ("safe_html_display",)

    @admin.display(description="Message")
    def message_preview(self, obj):
        """Show a preview of the message (first 50 characters)."""
        text = obj.message
        return text if len(text) <= 50 else text[:50] + "..."

    @admin.display(description="Sanitized HTML Preview")
    def safe_html_display(self, obj):
        """Display the sanitized HTML"""
        if not obj.safe_html_message:
            return "No HTML content"
        return format_html(
            '<div style="padding: 10px; border: 1px solid #ccc; background-color: #f9f9f9;">{}</div>',
            obj.safe_html_message,
        )
|
||||
6
dashboard_project/data_integration/apps.py
Normal file
6
dashboard_project/data_integration/apps.py
Normal file
@ -0,0 +1,6 @@
|
||||
from django.apps import AppConfig
|
||||
|
||||
|
||||
class DataIntegrationConfig(AppConfig):
    """App config for the external chat-data integration app."""

    default_auto_field = "django.db.models.BigAutoField"
    name = "data_integration"
|
||||
@ -0,0 +1,27 @@
|
||||
from data_integration.models import ExternalDataSource
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
|
||||
class Command(BaseCommand):
    """Management command: seed the default ExternalDataSource record when
    none exists yet; a no-op on subsequent runs."""

    help = "Create default external data source configuration"

    def handle(self, *_args, **_options):
        # Guard clause: never create a second source.
        if ExternalDataSource.objects.exists():
            self.stdout.write(self.style.SUCCESS("External data source already exists, no action taken."))
            return

        source = ExternalDataSource.objects.create(  # nosec: B106
            name="Notso AI Chat API",
            api_url="https://HOST/COMPANY/chats",
            auth_username="DEFAULT_USERNAME",  # Will be set via environment variables
            auth_password="DEFAULT_PASSWORD",  # Will be set via environment variables
            is_active=True,
            sync_interval=int(self.get_env_var("CHAT_DATA_FETCH_INTERVAL", "3600")),
            timeout=int(self.get_env_var("FETCH_DATA_TIMEOUT", "300")),
        )
        self.stdout.write(self.style.SUCCESS(f"Created default external data source: {source.name}"))

    def get_env_var(self, name, default):
        """Get environment variable or return default"""
        import os

        return os.environ.get(name, default)
|
||||
@ -0,0 +1,11 @@
|
||||
from data_integration.utils import fetch_and_store_chat_data
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
|
||||
class Command(BaseCommand):
    # Manual entry point for a one-off sync; wraps fetch_and_store_chat_data(),
    # the same helper the periodic Celery task uses.
    help = "Fetches chat data from the external API and stores it in the database"

    def handle(self, *_args, **_options):  # Mark as unused
        self.stdout.write(self.style.SUCCESS("Starting data fetch..."))
        # NOTE(review): assumes fetch_and_store_chat_data raises on failure;
        # otherwise the success message below prints unconditionally.
        fetch_and_store_chat_data()
        self.stdout.write(self.style.SUCCESS("Successfully fetched and stored chat data."))
|
||||
@ -0,0 +1,79 @@
|
||||
#!/usr/bin/env python
|
||||
"""
|
||||
Migration Fix Script for ExternalDataSource
|
||||
|
||||
This management command adds the missing fields to ExternalDataSource
|
||||
model directly using SQL, which is useful if Django migrations
|
||||
are having issues.
|
||||
"""
|
||||
|
||||
import logging
|
||||
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.db import connection
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Command(BaseCommand):
    """Add any missing columns to the ExternalDataSource table via raw SQL.

    Escape hatch for when Django migrations are out of sync with the
    database. SQLite-specific: relies on PRAGMA table_info and
    ALTER TABLE ... ADD COLUMN.
    """

    help = "Fix missing columns in ExternalDataSource table"

    # Column name -> DDL that adds it. Also defines the check order.
    _COLUMN_DDL = {
        "error_count": (
            "ALTER TABLE data_integration_externaldatasource ADD COLUMN error_count integer DEFAULT 0"
        ),
        "last_error": (
            "ALTER TABLE data_integration_externaldatasource ADD COLUMN last_error varchar(255) NULL"
        ),
        "sync_interval": (
            "ALTER TABLE data_integration_externaldatasource ADD COLUMN sync_interval integer DEFAULT 3600"
        ),
        "timeout": (
            "ALTER TABLE data_integration_externaldatasource ADD COLUMN timeout integer DEFAULT 300"
        ),
    }

    def handle(self, *args, **options):  # noqa: ARG002
        self.stdout.write("Checking ExternalDataSource schema...")

        # All cursor use stays inside the context manager so the cursor is
        # guaranteed to be open for the ALTER statements (the original code
        # risked executing DDL on a closed cursor).
        with connection.cursor() as cursor:
            cursor.execute("PRAGMA table_info(data_integration_externaldatasource)")
            columns = [col[1] for col in cursor.fetchall()]

            missing_columns = [name for name in self._COLUMN_DDL if name not in columns]

            if not missing_columns:
                self.stdout.write(self.style.SUCCESS("✅ All columns exist in ExternalDataSource table"))
                return

            self.stdout.write(f"Missing columns: {', '.join(missing_columns)}")
            self.stdout.write("Adding missing columns...")

            try:
                # Add missing columns with SQLite
                for col in missing_columns:
                    cursor.execute(self._COLUMN_DDL[col])

                self.stdout.write(
                    self.style.SUCCESS(f"✅ Successfully added missing columns: {', '.join(missing_columns)}")
                )

                # Verify columns were added
                cursor.execute("PRAGMA table_info(data_integration_externaldatasource)")
                updated_columns = [col[1] for col in cursor.fetchall()]
                self.stdout.write(f"Current columns: {', '.join(updated_columns)}")

            except Exception as e:
                self.stdout.write(self.style.ERROR(f"❌ Error adding columns: {e}"))
                self.stdout.write(self.style.WARNING("Consider running Django migrations instead:"))
                self.stdout.write("  python manage.py makemigrations data_integration")
                self.stdout.write("  python manage.py migrate data_integration")
|
||||
@ -0,0 +1,47 @@
|
||||
import logging
|
||||
|
||||
from data_integration.tasks import test_task
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.utils import timezone
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Command(BaseCommand):
    """Submit the no-op test task and wait briefly for its result, verifying
    the Celery broker and worker configuration end to end."""

    help = "Test Celery configuration by executing a simple task"

    def handle(self, *args, **options):  # noqa: ARG002
        # BUG FIX: AsyncResult.get() raises celery.exceptions.TimeoutError,
        # which is NOT a subclass of the builtin TimeoutError — catching only
        # the builtin made the timeout branch unreachable (the generic error
        # branch swallowed it instead). Catch both to be safe.
        from celery.exceptions import TimeoutError as CeleryTimeoutError

        self.stdout.write(f"Testing Celery configuration at {timezone.now()}")

        try:
            # Run the test task
            self.stdout.write("Submitting test task to Celery...")
            result = test_task.delay()
            task_id = result.id

            self.stdout.write(f"Task submitted with ID: {task_id}")
            self.stdout.write("Waiting for task result (this may take a few seconds)...")

            # Try to get the result with a timeout
            try:
                task_result = result.get(timeout=10)  # 10 second timeout
                self.stdout.write(self.style.SUCCESS(f"✅ Task completed successfully with result: {task_result}"))
                return
            except (CeleryTimeoutError, TimeoutError):
                self.stdout.write(
                    self.style.WARNING(
                        "⚠️ Task did not complete within the timeout period. "
                        "This might be normal if Celery worker isn't running."
                    )
                )

            self.stdout.write(
                "To check task status, run Celery worker in another terminal with:\n"
                " make celery\n"
                f"And then check status of task {task_id}"
            )

        except Exception as e:
            self.stdout.write(self.style.ERROR(f"❌ Error testing Celery: {e}"))
            self.stdout.write("Make sure the Celery broker (Redis or SQLite) is properly configured.")
            self.stdout.write("To start Celery, run:\n make celery")
|
||||
@ -0,0 +1,69 @@
|
||||
#!/usr/bin/env python
|
||||
"""
|
||||
Test the ExternalDataSource Model Schema
|
||||
|
||||
This management command tests if the ExternalDataSource schema has been correctly updated.
|
||||
"""
|
||||
|
||||
import logging
|
||||
|
||||
from data_integration.models import ExternalDataSource
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Command(BaseCommand):
    """Verify that ExternalDataSource exposes the status/sync fields and
    that their values round-trip through the database."""

    help = "Test ExternalDataSource model fields"

    def handle(self, *args, **options):  # noqa: ARG002
        self.stdout.write("Testing ExternalDataSource schema...")

        try:
            # Get or create an inactive placeholder row to exercise the schema.
            source, created = ExternalDataSource.objects.get_or_create(
                name="Test Source",
                defaults={
                    "api_url": "https://example.com/api",
                    "is_active": False,
                },
            )

            verb = "Created" if created else "Using existing"
            self.stdout.write(f"{verb} test source with ID: {source.id}")

            # Field -> value to assign and later read back.
            fields_to_test = {
                "error_count": 0,
                "last_error": "Test error message",
                "sync_interval": 7200,
                "timeout": 600,
            }

            for name, value in fields_to_test.items():
                try:
                    setattr(source, name, value)
                except AttributeError:
                    self.stdout.write(self.style.ERROR(f"❌ Field {name} doesn't exist on the model"))
                else:
                    self.stdout.write(self.style.SUCCESS(f"✅ Successfully set {name} = {value}"))

            try:
                source.save()
            except Exception as e:
                self.stdout.write(self.style.ERROR(f"❌ Error saving model: {e}"))
            else:
                self.stdout.write(self.style.SUCCESS("✅ Successfully saved with all fields"))

            # Re-fetch from the database to confirm the values persisted.
            refreshed = ExternalDataSource.objects.get(id=source.id)
            self.stdout.write("\nVerifying saved values:")
            for name, expected in fields_to_test.items():
                actual = getattr(refreshed, name, "MISSING")
                if actual == expected:
                    self.stdout.write(self.style.SUCCESS(f"✅ {name} = {actual} (correct)"))
                else:
                    self.stdout.write(self.style.ERROR(f"❌ {name} = {actual} (expected: {expected})"))

        except Exception as e:
            self.stdout.write(self.style.ERROR(f"❌ Test failed: {e}"))
|
||||
@ -0,0 +1,117 @@
|
||||
import bleach
|
||||
from bleach.css_sanitizer import CSSSanitizer
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
|
||||
class Command(BaseCommand):
    """Run a fixed XSS payload through the bleach sanitizer and report
    whether each class of dangerous content was removed."""

    help = "Test the HTML sanitizer with CSS Sanitizer"

    def handle(self, *args, **options):  # noqa: ARG002
        # Fixture: mixes legitimate inline styles with script tags,
        # javascript: URLs, event handlers and a disallowed <img>.
        test_html = """
        <div style="color: red; background-color: yellow; transform: rotate(30deg);">
            <p style="font-size: 16px; margin: 10px;">
                This is a <span style="font-weight: bold; color: blue;">styled</span> paragraph.
            </p>
            <script>alert('XSS attack');</script>
            <a href="javascript:alert('Evil');" style="text-decoration: none;">Dangerous Link</a>
            <img src="x" onerror="alert('XSS')" style="border: 1px solid red;">
        </div>
        """

        # Whitelisted CSS properties (layout/typography only — no transforms).
        css_sanitizer = CSSSanitizer(
            allowed_css_properties=[
                "color",
                "background-color",
                "font-family",
                "font-size",
                "font-weight",
                "font-style",
                "text-decoration",
                "text-align",
                "margin",
                "margin-left",
                "margin-right",
                "margin-top",
                "margin-bottom",
                "padding",
                "padding-left",
                "padding-right",
                "padding-top",
                "padding-bottom",
                "border",
                "border-radius",
                "width",
                "height",
                "line-height",
            ]
        )

        allowed_tags = [
            "b",
            "i",
            "u",
            "em",
            "strong",
            "a",
            "br",
            "p",
            "ul",
            "ol",
            "li",
            "span",
            "div",
            "pre",
            "code",
            "blockquote",
        ]
        allowed_attributes = {
            "a": ["href", "title", "target"],
            "span": ["style", "class"],
            "div": ["style", "class"],
            "p": ["style", "class"],
            "pre": ["style", "class"],
        }

        cleaned_html = bleach.clean(
            test_html,
            tags=allowed_tags,
            attributes=allowed_attributes,
            css_sanitizer=css_sanitizer,
            strip=True,
        )

        # Show before/after for manual inspection.
        self.stdout.write(self.style.SUCCESS("Original HTML:"))
        self.stdout.write(test_html)
        self.stdout.write("\n\n")
        self.stdout.write(self.style.SUCCESS("Cleaned HTML:"))
        self.stdout.write(cleaned_html)
        self.stdout.write("\n\n")

        # Data-driven checks: each marker must be absent from the output.
        self.stdout.write(self.style.SUCCESS("Security Checks:"))
        checks = [
            ("script", "Script tags"),
            ("javascript:", "JavaScript URLs"),
            ("onerror", "Event handlers"),
            ("transform", "Unsafe CSS properties"),
            ("img", "Unsupported tags"),
        ]
        for marker, label in checks:
            if marker not in cleaned_html:
                self.stdout.write(self.style.SUCCESS(f"✓ {label} removed"))
            else:
                self.stdout.write(self.style.ERROR(f"✗ {label} found"))
|
||||
@ -0,0 +1,68 @@
|
||||
import logging
|
||||
|
||||
from django.conf import settings
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Command(BaseCommand):
    """Check that Redis is reachable with the configured host/port/db and
    that a basic SET/GET round-trip works — the same prerequisites Celery
    has when Redis is the broker/result backend."""

    help = "Test Redis connection for Celery"

    def handle(self, *args, **options):  # noqa: ARG002
        self.stdout.write("Testing Redis connection...")

        try:
            import redis

            # Connection parameters come from settings, with sane defaults.
            redis_host = getattr(settings, "REDIS_HOST", "localhost")
            redis_port = int(getattr(settings, "REDIS_PORT", 6379))
            redis_db = int(getattr(settings, "REDIS_DB", 0))

            import os

            if "REDIS_URL" in os.environ:
                self.stdout.write(f"REDIS_URL environment variable found: {os.environ['REDIS_URL']}")

            # Try to connect and ping
            redis_client = redis.Redis(host=redis_host, port=redis_port, db=redis_db, socket_connect_timeout=2)

            if not redis_client.ping():
                self.stdout.write(self.style.ERROR("❌ Redis ping failed!"))
                return

            self.stdout.write(
                self.style.SUCCESS(
                    f"✅ Redis connection successful! Connected to {redis_host}:{redis_port}/{redis_db}"
                )
            )
            self.stdout.write(f"Broker URL: {settings.CELERY_BROKER_URL}")
            self.stdout.write(f"Result backend: {settings.CELERY_RESULT_BACKEND}")

            # Round-trip a value to verify both reads and writes work.
            test_key = "test_redis_connection"
            test_value = "success"
            redis_client.set(test_key, test_value)
            retrieved_value = redis_client.get(test_key)

            if retrieved_value and retrieved_value.decode() == test_value:
                self.stdout.write(self.style.SUCCESS("✅ Redis SET/GET test passed!"))
            else:
                self.stdout.write(
                    self.style.WARNING(
                        f"⚠️ Redis SET/GET test failed: Got {retrieved_value} instead of {test_value}"
                    )
                )

            # Clean up
            redis_client.delete(test_key)
        # BUG FIX: ImportError must be checked BEFORE the redis.exceptions
        # clauses — when the redis package is missing, the name `redis` is
        # unbound and evaluating redis.exceptions.ConnectionError raised
        # NameError, making the ImportError branch unreachable.
        except ImportError:
            self.stdout.write(self.style.ERROR("❌ Redis package not installed. Install with: pip install redis"))
        except redis.exceptions.ConnectionError as e:
            self.stdout.write(self.style.ERROR(f"❌ Redis connection error: {e}"))
            self.stdout.write("Celery will use SQLite fallback if configured.")
        except Exception as e:
            self.stdout.write(self.style.ERROR(f"❌ Error: {e}"))
|
||||
@ -0,0 +1,99 @@
|
||||
# Generated by Django 5.2.1 on 2025-05-17 21:14
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
    # Initial schema for the data_integration app: chat sessions pulled from
    # an external API, their transcript messages, and the source configuration.

    initial = True

    dependencies = []

    operations = [
        # One row per chat session synced from the external chat API; all
        # analytics columns are nullable because the feed may omit them.
        migrations.CreateModel(
            name="ChatSession",
            fields=[
                (
                    "id",
                    models.BigAutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("session_id", models.CharField(max_length=255, unique=True)),
                ("start_time", models.DateTimeField()),
                ("end_time", models.DateTimeField()),
                ("ip_address", models.GenericIPAddressField(blank=True, null=True)),
                ("country", models.CharField(blank=True, max_length=255, null=True)),
                ("language", models.CharField(blank=True, max_length=255, null=True)),
                ("messages_sent", models.IntegerField(blank=True, null=True)),
                ("sentiment", models.CharField(blank=True, max_length=255, null=True)),
                ("escalated", models.BooleanField(blank=True, null=True)),
                ("forwarded_hr", models.BooleanField(blank=True, null=True)),
                (
                    "full_transcript_url",
                    models.URLField(blank=True, max_length=1024, null=True),
                ),
                ("avg_response_time", models.FloatField(blank=True, null=True)),
                ("tokens", models.IntegerField(blank=True, null=True)),
                ("tokens_eur", models.FloatField(blank=True, null=True)),
                ("category", models.CharField(blank=True, max_length=255, null=True)),
                ("initial_msg", models.TextField(blank=True, null=True)),
                ("user_rating", models.IntegerField(blank=True, null=True)),
            ],
        ),
        # Configuration for an external API endpoint to fetch chats from.
        migrations.CreateModel(
            name="ExternalDataSource",
            fields=[
                (
                    "id",
                    models.BigAutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("name", models.CharField(default="External API", max_length=255)),
                ("api_url", models.URLField(default="https://proto.notso.ai/XY/chats")),
                (
                    "auth_username",
                    models.CharField(blank=True, max_length=255, null=True),
                ),
                (
                    "auth_password",
                    models.CharField(blank=True, max_length=255, null=True),
                ),
                ("last_synced", models.DateTimeField(blank=True, null=True)),
                ("is_active", models.BooleanField(default=True)),
            ],
        ),
        # Individual transcript lines; cascade-deleted with their session.
        migrations.CreateModel(
            name="ChatMessage",
            fields=[
                (
                    "id",
                    models.BigAutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("timestamp", models.DateTimeField(auto_now_add=True)),
                ("sender", models.CharField(max_length=255)),
                ("message", models.TextField()),
                ("safe_html_message", models.TextField(blank=True, null=True)),
                (
                    "session",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="messages",
                        to="data_integration.chatsession",
                    ),
                ),
            ],
        ),
    ]
|
||||
@ -0,0 +1,43 @@
|
||||
# Generated by Django 5.2.1 on 2025-05-17 22:33
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
    # Adds error tracking (error_count / last_error) and scheduling knobs
    # (sync_interval / timeout) to ExternalDataSource, and retargets the
    # default API URL from the "XY" placeholder to the "jumbo" endpoint.

    dependencies = [
        ("data_integration", "0001_initial"),
    ]

    operations = [
        migrations.AddField(
            model_name="externaldatasource",
            name="error_count",
            field=models.IntegerField(default=0),
        ),
        migrations.AddField(
            model_name="externaldatasource",
            name="last_error",
            field=models.CharField(blank=True, max_length=255, null=True),
        ),
        migrations.AddField(
            model_name="externaldatasource",
            name="sync_interval",
            field=models.IntegerField(
                default=3600,
                help_text="Sync interval in seconds. Default is 3600 (1 hour)",
            ),
        ),
        migrations.AddField(
            model_name="externaldatasource",
            name="timeout",
            field=models.IntegerField(
                default=300,
                help_text="Timeout in seconds for each sync operation. Default is 300 (5 minutes)",
            ),
        ),
        migrations.AlterField(
            model_name="externaldatasource",
            name="api_url",
            field=models.URLField(default="https://proto.notso.ai/jumbo/chats"),
        ),
    ]
|
||||
78
dashboard_project/data_integration/models.py
Normal file
78
dashboard_project/data_integration/models.py
Normal file
@ -0,0 +1,78 @@
|
||||
import os
|
||||
|
||||
from django.db import models
|
||||
|
||||
|
||||
class ChatSession(models.Model):
    """One chat conversation synced from the external chat API.

    Columns mirror the API's CSV feed; anything the feed may omit is
    nullable. ``session_id`` is the natural key used for upserts.
    """

    session_id = models.CharField(max_length=255, unique=True)  # natural key from the API
    start_time = models.DateTimeField()
    end_time = models.DateTimeField()
    ip_address = models.GenericIPAddressField(null=True, blank=True)
    country = models.CharField(max_length=255, null=True, blank=True)
    language = models.CharField(max_length=255, null=True, blank=True)
    messages_sent = models.IntegerField(null=True, blank=True)
    sentiment = models.CharField(max_length=255, null=True, blank=True)
    escalated = models.BooleanField(null=True, blank=True)
    forwarded_hr = models.BooleanField(null=True, blank=True)
    full_transcript_url = models.URLField(max_length=1024, null=True, blank=True)
    avg_response_time = models.FloatField(null=True, blank=True)
    tokens = models.IntegerField(null=True, blank=True)
    tokens_eur = models.FloatField(null=True, blank=True)
    category = models.CharField(max_length=255, null=True, blank=True)
    initial_msg = models.TextField(null=True, blank=True)
    user_rating = models.IntegerField(null=True, blank=True)

    def __str__(self):
        """Display the external session id."""
        return self.session_id
|
||||
|
||||
|
||||
class ChatMessage(models.Model):
    """A single transcript line belonging to a ChatSession."""

    session = models.ForeignKey(ChatSession, related_name="messages", on_delete=models.CASCADE)
    timestamp = models.DateTimeField(auto_now_add=True)  # Changed to auto_now_add for simplicity
    sender = models.CharField(max_length=255)  # "User" or "Assistant"
    message = models.TextField()  # raw transcript text as received
    safe_html_message = models.TextField(blank=True, null=True)  # For storing sanitized HTML

    def __str__(self):
        return f"{self.session.session_id} - {self.sender} at {self.timestamp}"
|
||||
|
||||
|
||||
class ExternalDataSource(models.Model):
    """Configuration for one external chat API endpoint to sync from.

    Credentials can come either from the DB fields or from environment
    variables (see ``get_auth_username`` / ``get_auth_password``).
    """

    name = models.CharField(max_length=255, default="External API")
    api_url = models.URLField(default="https://proto.notso.ai/jumbo/chats")
    auth_username = models.CharField(max_length=255, blank=True, null=True)
    auth_password = models.CharField(
        max_length=255, blank=True, null=True
    )  # Consider using a more secure way to store credentials
    last_synced = models.DateTimeField(null=True, blank=True)
    is_active = models.BooleanField(default=True)
    error_count = models.IntegerField(default=0)  # incremented on each failed sync
    last_error = models.CharField(max_length=255, blank=True, null=True)  # truncated message of the last failure
    sync_interval = models.IntegerField(default=3600, help_text="Sync interval in seconds. Default is 3600 (1 hour)")
    timeout = models.IntegerField(
        default=300,
        help_text="Timeout in seconds for each sync operation. Default is 300 (5 minutes)",
    )

    def get_auth_username(self):
        """Get username from environment variable if set, otherwise use stored value"""
        # EXTERNAL_API_USERNAME lets deployments keep credentials out of the DB.
        env_username = os.environ.get("EXTERNAL_API_USERNAME")
        return env_username if env_username else self.auth_username

    def get_auth_password(self):
        """Get password from environment variable if set, otherwise use stored value"""
        env_password = os.environ.get("EXTERNAL_API_PASSWORD")
        return env_password if env_password else self.auth_password

    def get_status(self):
        """Get the status of this data source.

        Returns one of: "Inactive", "Never synced", "Error (<count>)", "Active".
        """
        if not self.is_active:
            return "Inactive"
        if not self.last_synced:
            return "Never synced"
        if self.error_count > 0:
            return f"Error ({self.error_count})"
        return "Active"

    def __str__(self):
        return self.name
|
||||
116
dashboard_project/data_integration/tasks.py
Normal file
116
dashboard_project/data_integration/tasks.py
Normal file
@ -0,0 +1,116 @@
|
||||
import logging
|
||||
import os
|
||||
|
||||
from celery import shared_task
|
||||
from django.db import utils as django_db_utils
|
||||
from django.utils import timezone
|
||||
|
||||
from .models import ExternalDataSource
|
||||
from .utils import fetch_and_store_chat_data
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@shared_task(name="data_integration.tasks.test_task", bind=True)
def test_task(self):
    """Smoke-test task proving the Celery worker picks up and runs jobs.

    Touches no models or external services — it only logs and returns a
    fixed success string.
    """
    executed_at = timezone.now()
    task_id = self.request.id
    logger.info("Test task executed at %s (task_id: %s)", executed_at, task_id)
    return "Test task completed successfully!"
|
||||
|
||||
|
||||
@shared_task(
    name="data_integration.tasks.periodic_fetch_chat_data",
    bind=True,
    autoretry_for=(Exception,),
    retry_kwargs={"max_retries": 3, "countdown": 60},
    soft_time_limit=int(os.environ.get("FETCH_DATA_TIMEOUT", 300)),  # 5 minutes default
)
def periodic_fetch_chat_data(self):
    """Periodically fetch and process chat data from external sources.

    This task:
    1. Fetches data from all active external data sources
    2. Processes and stores the data in the database
    3. Updates the last_synced timestamp on each source
    4. Handles errors with retries

    Returns:
        str: A short human-readable summary of the run.

    Raises:
        Exception: when every source fails — re-raised so Celery's
        autoretry (max 3 retries, 60 s countdown) kicks in.
    """
    logger.info("Starting periodic chat data fetch (task_id: %s)...", self.request.id)
    try:
        # Get all active data sources
        active_sources = ExternalDataSource.objects.filter(is_active=True)

        if not active_sources.exists():
            logger.warning("No active external data sources found. Skipping fetch.")
            return "No active data sources found"

        successful_sources = []
        failed_sources = []

        for source in active_sources:
            try:
                logger.info(f"Processing source: {source.name} (ID: {source.id})")
                fetch_and_store_chat_data(source_id=source.id)
                source.last_synced = timezone.now()
                # Check if error_count field exists in the model
                update_fields = ["last_synced"]
                try:
                    source.error_count = 0
                    source.last_error = None
                    update_fields.extend(["error_count", "last_error"])
                except AttributeError:
                    # Fields might not exist yet if migrations haven't been applied
                    # NOTE(review): plain attribute assignment on a Django model
                    # normally cannot raise AttributeError, so this guard likely
                    # never fires — confirm whether it is still needed.
                    logger.warning("New fields not available. Run migrations to enable error tracking.")
                source.save(update_fields=update_fields)
                successful_sources.append(source.name)
            except Exception as e:
                logger.error(f"Error fetching data from source {source.name}: {e}", exc_info=True)
                try:
                    source.error_count = getattr(source, "error_count", 0) + 1
                    source.last_error = str(e)[:255]  # Truncate to fit in the field
                    source.save(update_fields=["error_count", "last_error"])
                except (AttributeError, django_db_utils.OperationalError):
                    # If fields don't exist, just update last_synced
                    logger.warning("Could not update error fields. Run migrations to enable error tracking.")
                    source.last_synced = timezone.now()
                    source.save(update_fields=["last_synced"])
                failed_sources.append(source.name)

        if failed_sources and not successful_sources:
            # If all sources failed, we should raise an exception to trigger retry
            raise Exception(f"All data sources failed: {', '.join(failed_sources)}")

        result_message = f"Completed: {len(successful_sources)} successful, {len(failed_sources)} failed"
        logger.info(result_message)
        return result_message

    except Exception as e:
        logger.error(f"Error during periodic chat data fetch: {e}", exc_info=True)
        raise  # Re-raise to trigger Celery retry
|
||||
|
||||
|
||||
@shared_task(name="data_integration.tasks.refresh_specific_source", bind=True)
def refresh_specific_source(self, source_id):
    """Manually refresh a single external data source.

    Args:
        source_id: Primary key of the ExternalDataSource to refresh.

    Returns:
        str: A human-readable status line (success or error description).
    """
    logger.info(
        "Starting manual refresh of data source ID: %s (task_id: %s)",
        source_id,
        self.request.id,
    )
    try:
        source = ExternalDataSource.objects.get(id=source_id)
        fetch_and_store_chat_data(source_id=source_id)
        # Successful sync: stamp the time and clear any previous error state.
        source.last_synced = timezone.now()
        source.error_count = 0
        source.last_error = None
        source.save(update_fields=["last_synced", "error_count", "last_error"])
        logger.info("Manual refresh of data source %s completed successfully", source.name)
        return f"Successfully refreshed data source: {source.name}"
    except ExternalDataSource.DoesNotExist:
        logger.error("Data source with ID %s does not exist", source_id)
        return f"Error: Data source with ID {source_id} does not exist"
    except Exception as e:
        logger.error(
            "Error during manual refresh of data source %s: %s",
            source_id,
            e,
            exc_info=True,
        )
        return f"Error: {str(e)}"
|
||||
1
dashboard_project/data_integration/tests.py
Normal file
1
dashboard_project/data_integration/tests.py
Normal file
@ -0,0 +1 @@
|
||||
# Create your tests here.
|
||||
14
dashboard_project/data_integration/urls.py
Normal file
14
dashboard_project/data_integration/urls.py
Normal file
@ -0,0 +1,14 @@
|
||||
from django.urls import path
|
||||
|
||||
from . import views
|
||||
|
||||
# URL namespace for reversing, e.g. "data_integration:manual_data_refresh".
app_name = "data_integration"

urlpatterns = [
    # Superuser-only: trigger a refresh of all active data sources.
    path("manual-refresh/", views.manual_data_refresh, name="manual_data_refresh"),
    # Staff-only: refresh one ExternalDataSource by primary key.
    path(
        "refresh/<int:source_id>/",
        views.refresh_specific_datasource,
        name="refresh_specific_datasource",
    ),
]
|
||||
340
dashboard_project/data_integration/utils.py
Normal file
340
dashboard_project/data_integration/utils.py
Normal file
@ -0,0 +1,340 @@
|
||||
import csv
|
||||
import logging
|
||||
from datetime import datetime
|
||||
|
||||
import bleach
|
||||
import requests
|
||||
from bleach.css_sanitizer import CSSSanitizer
|
||||
from django.utils.timezone import make_aware
|
||||
|
||||
from .models import ChatMessage, ChatSession, ExternalDataSource
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Column order of the CSV returned by the external chat API. The feed ships
# without a header row, so each data row is zipped against this list
# positionally (see fetch_and_store_chat_data).
EXPECTED_HEADERS = [
    "session_id",
    "start_time",
    "end_time",
    "ip_address",
    "country",
    "language",
    "messages_sent",
    "sentiment",
    "escalated",
    "forwarded_hr",
    "full_transcript",
    "avg_response_time",
    "tokens",
    "tokens_eur",
    "category",
    "initial_msg",
    "user_rating",
]
|
||||
|
||||
|
||||
def _parse_flexible_datetime(value):
    """Return an aware datetime parsed from *value*.

    The external feed mixes European ("DD.MM.YYYY HH:MM:SS") and ISO
    ("YYYY-MM-DD HH:MM:SS") timestamps; the European form is tried first.

    Raises:
        ValueError: if *value* matches neither format.
    """
    try:
        return make_aware(datetime.strptime(value, "%d.%m.%Y %H:%M:%S"))
    except ValueError:
        return make_aware(datetime.strptime(value, "%Y-%m-%d %H:%M:%S"))


def fetch_and_store_chat_data(source_id=None):
    """Fetch chat data from an external API and store it in the database.

    Args:
        source_id: Optional ID of specific ExternalDataSource to use.
            If None, will use the first active source.

    Returns:
        dict: Stats about the operation. On success it contains
        ``sessions_created``, ``sessions_updated``, ``transcripts_processed``,
        ``errors`` and ``success=True``; on failure it contains
        ``success=False`` and an ``error`` message.
    """
    # Resolve which source to sync from.
    if source_id:
        source = ExternalDataSource.objects.filter(id=source_id, is_active=True).first()
        if not source:
            logger.error(f"Data source with ID {source_id} not found or not active.")
            return {
                "success": False,
                "error": f"Data source with ID {source_id} not found or not active.",
            }
    else:
        source = ExternalDataSource.objects.filter(is_active=True).first()
        if not source:
            logger.warning("No active data source found.")
            return {"success": False, "error": "No active data source found."}

    stats = {
        "sessions_created": 0,
        "sessions_updated": 0,
        "transcripts_processed": 0,
        "errors": 0,
        "success": True,
    }

    try:
        # Fetch data from API with timeout from source settings or default.
        timeout = getattr(source, "timeout", 30)
        response = requests.get(
            source.api_url,
            auth=((source.get_auth_username(), source.get_auth_password()) if source.get_auth_username() else None),
            timeout=timeout,
        )
        response.raise_for_status()
    except requests.RequestException as e:
        error_msg = f"Error fetching data from API {source.api_url}: {e}"
        logger.error(error_msg)
        return {"success": False, "error": error_msg}

    # Process CSV data. The feed has no header row, so rows are zipped
    # against the known EXPECTED_HEADERS column order.
    csv_data = response.content.decode("utf-8").splitlines()
    reader = csv.reader(csv_data)
    header = EXPECTED_HEADERS

    for row in reader:
        if not row:  # Skip empty rows
            continue
        try:
            # Pad short rows with empty strings so zip() covers every column.
            padded_row = row + [""] * (len(header) - len(row))
            data = dict(zip(header, padded_row, strict=False))

            start_time = _parse_flexible_datetime(data["start_time"])
            end_time = _parse_flexible_datetime(data["end_time"])

            # Coerce optional numeric/boolean columns; empty strings -> None.
            messages_sent = int(data["messages_sent"]) if data["messages_sent"] else None
            escalated = data["escalated"].lower() == "true" if data["escalated"] else None
            forwarded_hr = data["forwarded_hr"].lower() == "true" if data["forwarded_hr"] else None
            avg_response_time = float(data["avg_response_time"]) if data["avg_response_time"] else None
            tokens = int(data["tokens"]) if data["tokens"] else None
            tokens_eur = float(data["tokens_eur"]) if data["tokens_eur"] else None
            user_rating = int(data["user_rating"]) if data["user_rating"] and data["user_rating"].isdigit() else None

            # Upsert on session_id so re-syncs update instead of duplicating.
            session, created = ChatSession.objects.update_or_create(
                session_id=data["session_id"],
                defaults={
                    "start_time": start_time,
                    "end_time": end_time,
                    "ip_address": data.get("ip_address"),
                    "country": data.get("country"),
                    "language": data.get("language"),
                    "messages_sent": messages_sent,
                    "sentiment": data.get("sentiment"),
                    "escalated": escalated,
                    "forwarded_hr": forwarded_hr,
                    "full_transcript_url": data.get("full_transcript"),
                    "avg_response_time": avg_response_time,
                    "tokens": tokens,
                    "tokens_eur": tokens_eur,
                    "category": data.get("category"),
                    "initial_msg": data.get("initial_msg"),
                    "user_rating": user_rating,
                },
            )

            if created:
                stats["sessions_created"] += 1
                logger.info(f"Created session: {session.session_id}")
            else:
                stats["sessions_updated"] += 1
                logger.info(f"Updated session: {session.session_id}")

            # Fetch and process transcript if URL is present.
            if session.full_transcript_url:
                transcript_result = fetch_and_store_transcript(session, timeout)
                if transcript_result["success"]:
                    stats["transcripts_processed"] += 1

        except Exception as e:
            # One bad row must not abort the whole sync: count it and move on.
            logger.error(f"Error processing row: {row}. Error: {e}", exc_info=True)
            stats["errors"] += 1
            continue

    source.last_synced = make_aware(datetime.now())
    source.save()
    # BUG FIX: the original logged the literal text "{stats}" because the
    # f-string prefix was missing.
    logger.info(f"Data sync complete. Stats: {stats}")

    return stats
|
||||
|
||||
|
||||
def fetch_and_store_transcript(session, timeout=30):
    """Download a session's transcript and persist its messages.

    Args:
        session: The ChatSession whose ``full_transcript_url`` is fetched.
        timeout: Request timeout in seconds.

    Returns:
        dict: ``{"success", "messages_created", "error"}`` describing the outcome.
    """
    outcome = {"success": False, "messages_created": 0, "error": None}

    try:
        resp = requests.get(session.full_transcript_url, timeout=timeout)
        resp.raise_for_status()
        raw_text = resp.content.decode("utf-8")
        created = parse_and_store_transcript_messages(session, raw_text)
    except requests.RequestException as e:
        # Network / HTTP failure while downloading the transcript.
        outcome["error"] = f"Error fetching transcript for session {session.session_id}: {e}"
        logger.error(outcome["error"])
        return outcome
    except Exception as e:
        # Anything that went wrong while parsing or persisting messages.
        outcome["error"] = f"Error processing transcript for session {session.session_id}: {e}"
        logger.error(outcome["error"], exc_info=True)
        return outcome

    outcome["success"] = True
    outcome["messages_created"] = created
    return outcome
|
||||
|
||||
|
||||
def parse_and_store_transcript_messages(session, transcript_content):
    """Parse and store messages from a transcript.

    Lines starting with "User:" or "Assistant:" begin a new message; the
    following lines (until the next marker) are continuation lines of the
    current message.

    Args:
        session: The ChatSession object
        transcript_content: The raw transcript content

    Returns:
        int: Number of messages created
    """
    lines = transcript_content.splitlines()
    current_sender = None
    current_message_lines = []
    messages_created = 0

    # First, delete existing messages for this session to avoid duplicates.
    existing_count = ChatMessage.objects.filter(session=session).count()
    if existing_count > 0:
        logger.info(f"Deleting {existing_count} existing messages for session {session.session_id}")
        ChatMessage.objects.filter(session=session).delete()

    def _flush():
        """Persist the buffered message; return 1 if one was stored, else 0."""
        if current_sender and current_message_lines and save_message(session, current_sender, "\n".join(current_message_lines)):
            return 1
        return 0

    for line in lines:
        new_sender = None
        for prefix in ("User:", "Assistant:"):
            if line.startswith(prefix):
                new_sender = prefix[:-1]  # "User" or "Assistant"
                break

        if new_sender:
            # A new speaker marker closes out the previous message.
            messages_created += _flush()
            current_sender = new_sender
            # BUG FIX: strip only the *leading* marker. The original used
            # str.replace(), which removed the tag anywhere in the line and
            # corrupted messages whose text mentioned "User:"/"Assistant:".
            current_message_lines = [line[len(new_sender) + 1 :].strip()]
        elif current_sender:
            current_message_lines.append(line.strip())

    # Save the last buffered message.
    messages_created += _flush()

    logger.info(f"Created {messages_created} messages for session {session.session_id}")
    return messages_created
|
||||
|
||||
|
||||
def save_message(session, sender, message_text):
    """Save a message for a chat session.

    The raw text is stored unchanged in ``message``; a bleach-sanitized copy
    (restricted to a small whitelist of tags, attributes and CSS properties)
    is stored in ``safe_html_message`` for safe template rendering.

    Args:
        session: The ChatSession object
        sender: The sender of the message ("User" or "Assistant")
        message_text: The message text, which may contain HTML

    Returns:
        bool: True if message was created, False otherwise
    """
    # Whitespace-only messages are not worth persisting.
    if not message_text.strip():
        return False

    try:
        # Create a CSS sanitizer with allowed CSS properties
        css_sanitizer = CSSSanitizer(
            allowed_css_properties=[
                "color",
                "background-color",
                "font-family",
                "font-size",
                "font-weight",
                "font-style",
                "text-decoration",
                "text-align",
                "margin",
                "margin-left",
                "margin-right",
                "margin-top",
                "margin-bottom",
                "padding",
                "padding-left",
                "padding-right",
                "padding-top",
                "padding-bottom",
                "border",
                "border-radius",
                "width",
                "height",
                "line-height",
            ]
        )

        # Sanitize HTML content before saving if necessary.
        # strip=True drops disallowed tags entirely instead of escaping them.
        safe_html = bleach.clean(
            message_text,
            tags=[
                "b",
                "i",
                "u",
                "em",
                "strong",
                "a",
                "br",
                "p",
                "ul",
                "ol",
                "li",
                "span",
                "div",
                "pre",
                "code",
                "blockquote",
            ],
            attributes={
                "a": ["href", "title", "target"],
                "span": ["style", "class"],
                "div": ["style", "class"],
                "p": ["style", "class"],
                "pre": ["style", "class"],
            },
            css_sanitizer=css_sanitizer,
            strip=True,
        )

        ChatMessage.objects.create(
            session=session,
            sender=sender,
            message=message_text,
            safe_html_message=safe_html,
        )
        logger.debug(f"Stored message for session {session.session_id} from {sender}")
        return True
    except Exception as e:
        # Persisting a transcript line is best-effort: log and report failure
        # rather than aborting the caller's parse loop.
        logger.error(f"Error saving message for session {session.session_id}: {e}", exc_info=True)
        return False
|
||||
54
dashboard_project/data_integration/views.py
Normal file
54
dashboard_project/data_integration/views.py
Normal file
@ -0,0 +1,54 @@
|
||||
from django.contrib import messages
|
||||
from django.contrib.admin.views.decorators import staff_member_required
|
||||
from django.contrib.auth.decorators import login_required, user_passes_test
|
||||
from django.shortcuts import get_object_or_404, redirect
|
||||
|
||||
from .models import ExternalDataSource
|
||||
from .tasks import periodic_fetch_chat_data, refresh_specific_source
|
||||
from .utils import fetch_and_store_chat_data
|
||||
|
||||
# Create your views here.
|
||||
|
||||
|
||||
def is_superuser(user):
    """Predicate for ``user_passes_test``: allow only Django superusers."""
    return user.is_superuser
|
||||
|
||||
|
||||
@login_required
@user_passes_test(is_superuser)
def manual_data_refresh(request):
    """Superuser-only view that triggers a refresh of all data sources.

    On POST it tries the asynchronous Celery task first and falls back to a
    synchronous in-process fetch when the broker is unavailable; it then
    redirects back to the referring page (or the dashboard).
    """
    if request.method == "POST":
        try:
            # Try to use Celery first
            try:
                # Asynchronous with Celery
                periodic_fetch_chat_data.delay()
                messages.success(
                    request,
                    "Manual data refresh triggered successfully. The data will be updated shortly.",
                )
            except Exception:
                # Fall back to synchronous if Celery is not available
                fetch_and_store_chat_data()
                messages.success(
                    request,
                    "Manual data refresh completed successfully (synchronous mode).",
                )
        except Exception as e:
            messages.error(request, f"Failed to refresh data: {e}")
    # NOTE(review): redirecting to the raw Referer header is an open-redirect
    # vector (the header is client-controlled) — consider validating the host.
    return redirect(request.headers.get("referer", "dashboard"))  # Redirect to previous page or dashboard
|
||||
|
||||
|
||||
@staff_member_required
def refresh_specific_datasource(request, source_id):
    """View to trigger refresh of a specific data source. Used as a backup for admin URLs.

    Queues the Celery refresh task for the given ExternalDataSource, reports
    the outcome via the messages framework, and redirects back to the
    referring page (or the admin changelist).
    """
    source = get_object_or_404(ExternalDataSource, pk=source_id)

    try:
        # Try to use Celery
        task = refresh_specific_source.delay(source_id)
        messages.success(request, f"Data refresh task started for {source.name} (Task ID: {task.id})")
    except Exception as e:
        messages.error(request, f"Failed to refresh data source {source.name}: {e}")

    # NOTE(review): raw Referer redirect — client-controlled; see manual_data_refresh.
    return redirect(request.headers.get("referer", "/admin/data_integration/externaldatasource/"))
|
||||
42
dashboard_project/scripts/cleanup_duplicates.py
Normal file
42
dashboard_project/scripts/cleanup_duplicates.py
Normal file
@ -0,0 +1,42 @@
|
||||
# dashboard_project/scripts/cleanup_duplicates.py
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
# Add the project root to sys.path
|
||||
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))
|
||||
|
||||
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "dashboard_project.settings")
|
||||
import django # noqa: I001
|
||||
|
||||
django.setup()
|
||||
|
||||
from dashboard.models import ChatSession # noqa: E402, I001
|
||||
from django.db.models import Count # noqa: E402
|
||||
|
||||
|
||||
def cleanup_duplicates():
    """Delete duplicate dashboard ChatSessions.

    Rows count as duplicates when they share (session_id, data_source); the
    row with the lowest id in each group is kept and the rest are deleted.
    Progress is printed to stdout.
    """
    print("Looking for duplicate ChatSessions...")
    duplicates = ChatSession.objects.values("session_id", "data_source").annotate(count=Count("id")).filter(count__gt=1)

    total_deleted = 0
    for dup in duplicates:
        session_id = dup["session_id"]
        data_source = dup["data_source"]
        # Get all ids for this duplicate group, order by id (keep the first, delete the rest)
        ids = list(
            ChatSession.objects.filter(session_id=session_id, data_source=data_source)
            .order_by("id")
            .values_list("id", flat=True)
        )
        # Keep the first, delete the rest
        to_delete = ids[1:]
        deleted, _ = ChatSession.objects.filter(id__in=to_delete).delete()
        total_deleted += deleted
        print(f"Removed {deleted} duplicates for session_id={session_id}, data_source={data_source}")

    print(f"Done. Total duplicates removed: {total_deleted}")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
cleanup_duplicates()
|
||||
200
dashboard_project/scripts/fix_dashboard_data.py
Executable file
200
dashboard_project/scripts/fix_dashboard_data.py
Executable file
@ -0,0 +1,200 @@
|
||||
#!/usr/bin/env python
|
||||
# scripts/fix_dashboard_data.py
|
||||
|
||||
import os
|
||||
import sys
|
||||
from datetime import datetime
|
||||
|
||||
import django
|
||||
from accounts.models import Company
|
||||
from dashboard.models import ChatSession as DashboardChatSession
|
||||
from dashboard.models import DataSource
|
||||
from data_integration.models import ChatSession as ExternalChatSession
|
||||
from data_integration.models import ExternalDataSource
|
||||
from django.db import transaction
|
||||
from django.utils.timezone import make_aware
|
||||
|
||||
# Set up Django environment
|
||||
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))
|
||||
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "dashboard_project.settings")
|
||||
django.setup()
|
||||
|
||||
# SCRIPT CONFIG
|
||||
CREATE_TEST_DATA = False # Set to True to create sample data if none exists
|
||||
COMPANY_NAME = "Notso AI" # The company name to use
|
||||
|
||||
|
||||
def main():
    """Wire up dashboard data for COMPANY_NAME.

    Ensures an ExternalDataSource and a company-scoped dashboard DataSource
    exist, optionally seeds sample sessions (when CREATE_TEST_DATA is True
    and none exist), then syncs external sessions into the dashboard via
    sync_data(). Progress is printed to stdout.
    """
    print("Starting dashboard data fix...")

    # Get company; bail out (listing the available ones) if it doesn't exist.
    company = Company.objects.filter(name=COMPANY_NAME).first()
    if not company:
        print(f"Error: Company '{COMPANY_NAME}' not found.")
        print("Available companies:")
        for c in Company.objects.all():
            print(f"  - {c.name} (ID: {c.id})")
        return

    print(f"Using company: {company.name} (ID: {company.id})")

    # Get or create external data source
    ext_source, created = ExternalDataSource.objects.get_or_create(
        name="External API Data",
        defaults={
            "api_url": "https://proto.notso.ai/jumbo/chats",
            "is_active": True,
            "sync_interval": 3600,
            "last_synced": make_aware(datetime.now()),
        },
    )

    if created:
        print(f"Created external data source: {ext_source.name} (ID: {ext_source.id})")
    else:
        print(f"Using existing external data source: {ext_source.name} (ID: {ext_source.id})")

    # Get or create dashboard data source linked to external source
    dash_source, created = DataSource.objects.get_or_create(
        external_source=ext_source,
        company=company,
        defaults={
            "name": "External API Data",
            "description": "External data source for chat analytics",
        },
    )

    if created:
        print(f"Created dashboard data source: {dash_source.name} (ID: {dash_source.id})")
    else:
        print(f"Using existing dashboard data source: {dash_source.name} (ID: {dash_source.id})")

    # Check for external chat sessions
    ext_count = ExternalChatSession.objects.count()
    print(f"Found {ext_count} external chat sessions")

    if ext_count == 0 and CREATE_TEST_DATA:
        print("No external chat sessions found. Creating test data...")
        create_test_data(ext_source)

    # Sync data from external to dashboard
    sync_data(ext_source, dash_source)

    print("Done! Check your dashboard for data.")
|
||||
|
||||
|
||||
def create_test_data(ext_source):
    """Create sample chat sessions in the external data source.

    Args:
        ext_source: The ExternalDataSource instance the test sessions
            are attached to via the ``external_source`` FK.
    """

    def _aware(stamp):
        """Parse a 'DD.MM.YYYY HH:MM:SS' string into a tz-aware datetime.

        All fixture timestamps use the same day-first format, so the
        format string lives in one place instead of being repeated for
        every row.
        """
        return make_aware(datetime.strptime(stamp, "%d.%m.%Y %H:%M:%S"))

    # Per-session fixture rows; fields shared by every row (ip_address,
    # escalated, ...) are supplied once in the create() call below.
    test_data = [
        {
            "session_id": "test-session-1",
            "start_time": _aware("01.05.2025 10:00:00"),
            "end_time": _aware("01.05.2025 10:15:00"),
            "country": "Netherlands",
            "language": "Dutch",
            "messages_sent": 10,
            "sentiment": "Positive",
            "initial_msg": "Can you help me with my order?",
        },
        {
            "session_id": "test-session-2",
            "start_time": _aware("02.05.2025 14:30:00"),
            "end_time": _aware("02.05.2025 14:45:00"),
            "country": "Belgium",
            "language": "French",
            "messages_sent": 12,
            "sentiment": "Neutral",
            "initial_msg": "Je cherche des informations sur les produits.",
        },
        {
            "session_id": "test-session-3",
            "start_time": _aware("03.05.2025 09:15:00"),
            "end_time": _aware("03.05.2025 09:30:00"),
            "country": "Germany",
            "language": "German",
            "messages_sent": 8,
            "sentiment": "Negative",
            "initial_msg": "Ich habe ein Problem mit meiner Bestellung.",
        },
    ]

    sessions_created = 0
    for data in test_data:
        ExternalChatSession.objects.create(
            # Constant fields shared by every test row.
            ip_address="127.0.0.1",
            escalated=False,
            forwarded_hr=False,
            user_rating=5,
            external_source=ext_source,
            # Row-specific fields.
            **data,
        )
        sessions_created += 1

    print(f"Created {sessions_created} test sessions")
|
||||
|
||||
|
||||
def sync_data(ext_source, dash_source):
    """Copy every chat session from ``ext_source`` into ``dash_source``.

    Existing dashboard sessions for ``dash_source`` are deleted first, so
    the dashboard mirrors the current external data exactly. Each copy is
    wrapped in its own transaction: one bad row is reported and skipped
    without aborting the rest of the sync.
    """
    source_sessions = ExternalChatSession.objects.filter(external_source=ext_source)
    total = source_sessions.count()

    if total == 0:
        print("No external sessions to sync")
        return

    print(f"Syncing {total} sessions...")

    # Wipe any previous copy so stale or duplicate rows never accumulate.
    stale = DashboardChatSession.objects.filter(data_source=dash_source)
    stale_count = stale.count()
    if stale_count > 0:
        print(f"Clearing {stale_count} existing dashboard sessions")
        stale.delete()

    synced = 0
    errors = 0

    for src in source_sessions:
        try:
            # Atomic per row: either the whole dashboard record is
            # written or nothing is.
            with transaction.atomic():
                copied = DashboardChatSession.objects.create(
                    data_source=dash_source,
                    session_id=src.session_id,
                    start_time=src.start_time,
                    end_time=src.end_time,
                    ip_address=src.ip_address,
                    # Nullable source fields are normalized to empty
                    # strings / zeros / False on the dashboard side.
                    country=src.country or "",
                    language=src.language or "",
                    messages_sent=src.messages_sent or 0,
                    sentiment=src.sentiment or "",
                    escalated=src.escalated or False,
                    forwarded_hr=src.forwarded_hr or False,
                    full_transcript=src.full_transcript_url or "",
                    avg_response_time=src.avg_response_time,
                    tokens=src.tokens or 0,
                    tokens_eur=src.tokens_eur,
                    category=src.category or "",
                    initial_msg=src.initial_msg or "",
                    # Dashboard stores the rating as text; None becomes "".
                    user_rating=(str(src.user_rating) if src.user_rating is not None else ""),
                )
            synced += 1
            print(f"Synced session: {copied.session_id}")
        except Exception as e:
            print(f"Error syncing session {src.session_id}: {str(e)}")
            errors += 1

    print(f"Sync complete. Total: {synced} sessions synced, {errors} errors")
|
||||
|
||||
|
||||
# Script entry point: run the full sync workflow when executed directly.
# Importing this module elsewhere does not trigger the sync.
if __name__ == "__main__":
    main()
|
||||
@ -2,6 +2,186 @@
|
||||
* dashboard.css - Styles specific to dashboard functionality
|
||||
*/
|
||||
|
||||
/* Theme variables */
:root {
  /* Light theme (default). The dark theme below overrides the same
     custom properties, so components only ever reference var(--...) . */
  --bg-color: #f8f9fa;
  --text-color: #212529;
  --card-bg: #ffffff;
  --card-border: #dee2e6;
  --card-header-bg: #f1f3f5;
  --sidebar-bg: #f8f9fa;
  --navbar-bg: #343a40;
  --navbar-color: #ffffff;
  --link-color: #007bff;
  --secondary-text: #6c757d;
  --border-color: #e9ecef;
  --input-bg: #ffffff;
  --input-border: #ced4da;
  --table-stripe: rgba(0, 0, 0, 0.05);
  --stats-card-bg: #f1f3f5;
  --icon-bg: #e9f2ff;
  --icon-color: #007bff;
  /* Shared transition applied wherever the light/dark swap should animate. */
  --theme-transition:
    color 0.2s ease, background-color 0.2s ease, border-color 0.2s ease, box-shadow 0.2s ease;
}
|
||||
|
||||
/* Dark theme: overrides the :root palette when Bootstrap's
   data-bs-theme="dark" attribute is set on the <html> element. */
[data-bs-theme="dark"] {
  --bg-color: #212529;
  --text-color: #f8f9fa;
  --card-bg: #343a40;
  --card-border: #495057;
  --card-header-bg: #495057;
  --sidebar-bg: #2c3034;
  --navbar-bg: #1c1f23;
  --navbar-color: #f8f9fa;
  --link-color: #6ea8fe;
  --secondary-text: #adb5bd;
  --border-color: #495057;
  --input-bg: #2b3035;
  --input-border: #495057;
  --table-stripe: rgba(255, 255, 255, 0.05);
  --stats-card-bg: #2c3034;
  --icon-bg: #1e3a8a;
  --icon-color: #6ea8fe;
}
|
||||
|
||||
/* Apply theme variables */
body {
  background-color: var(--bg-color);
  color: var(--text-color);
  transition: var(--theme-transition);
}

/* Cards pick up themed surface + border colors. */
.card {
  background-color: var(--card-bg);
  border-color: var(--card-border);
  transition: var(--theme-transition);
}

.card-header {
  background-color: var(--card-header-bg);
  border-bottom-color: var(--card-border);
  transition: var(--theme-transition);
}

/* Navbar: !important needed to beat Bootstrap's own .navbar-dark rules. */
.navbar-dark {
  background-color: var(--navbar-bg) !important;
  border-bottom: 1px solid var(--border-color);
}

.navbar-dark .navbar-brand,
.navbar-dark .nav-link,
.navbar-dark .navbar-text {
  color: var(--navbar-color) !important;
}

.navbar-dark .btn-outline-light {
  border-color: var(--border-color);
  color: var(--navbar-color);
}

.navbar-dark .btn-outline-light:hover {
  background-color: rgba(255, 255, 255, 0.1);
  border-color: var(--border-color);
}

.sidebar {
  background-color: var(--sidebar-bg) !important;
}

/* Sidebar navigation styling with dark mode support */
.sidebar .nav-link {
  color: var(--text-color);
  transition: all 0.2s ease;
  border-radius: 0.375rem;
  margin: 0.1rem 0.5rem;
  padding: 0.5rem 1rem;
}

.sidebar .nav-link:hover {
  color: var(--link-color);
  background-color: rgba(0, 0, 0, 0.05);
}

/* Hover tint inverted for dark mode (light overlay instead of dark). */
[data-bs-theme="dark"] .sidebar .nav-link:hover {
  background-color: rgba(255, 255, 255, 0.05);
}

.sidebar .nav-link.active {
  color: var(--link-color);
  background-color: rgba(13, 110, 253, 0.1);
  font-weight: 600;
}

[data-bs-theme="dark"] .sidebar .nav-link.active {
  background-color: rgba(110, 168, 254, 0.1);
}

/* Fixed-width icons so sidebar labels line up. */
.sidebar .nav-link i {
  color: var(--secondary-text);
  width: 20px;
  text-align: center;
  margin-right: 0.5rem;
}

.sidebar .nav-link:hover i,
.sidebar .nav-link.active i {
  color: var(--link-color);
}

.sidebar .nav-header {
  color: var(--secondary-text);
  font-size: 0.8rem;
  text-transform: uppercase;
  letter-spacing: 0.08em;
  padding: 0.5rem 1.25rem;
  margin-top: 1rem;
}

.table {
  color: var(--text-color);
}

.table-striped tbody tr:nth-of-type(odd) {
  background-color: var(--table-stripe);
}

.nav-link {
  color: var(--link-color);
}

.stats-card {
  background-color: var(--stats-card-bg) !important;
}

.stat-card .stat-icon {
  background-color: var(--icon-bg);
  color: var(--icon-color);
}

/* Form inputs follow the themed input palette. */
.form-control,
.form-select {
  background-color: var(--input-bg);
  border-color: var(--input-border);
  color: var(--text-color);
}

/* Footer */
footer {
  background-color: var(--card-bg);
  border-top: 1px solid var(--border-color);
  color: var(--secondary-text);
  margin-top: 2rem;
  padding: 1.5rem 0;
  transition: var(--theme-transition);
}

[data-bs-theme="dark"] footer {
  background-color: var(--navbar-bg);
}
|
||||
|
||||
/* Dashboard grid layout */
|
||||
.dashboard-grid {
|
||||
display: grid;
|
||||
@ -291,7 +471,43 @@
|
||||
}
|
||||
}
|
||||
|
||||
/* --- Stat Boxes Alignment Fix (Bottom Align, No Overlap) --- */
/* Preserve colored background for stat cards in both themes.
   The generic .stats-card rule above forces var(--stats-card-bg), so each
   Bootstrap contextual color needs an explicit !important override here. */
.col-md-3 .card.stats-card.bg-primary {
  background-color: var(--bs-primary) !important;
  color: white !important;
}

.col-md-3 .card.stats-card.bg-success {
  background-color: var(--bs-success) !important;
  color: white !important;
}

.col-md-3 .card.stats-card.bg-info {
  background-color: var(--bs-info) !important;
  color: white !important;
}

.col-md-3 .card.stats-card.bg-warning {
  background-color: var(--bs-warning) !important;
  color: white !important;
}

.col-md-3 .card.stats-card.bg-danger {
  background-color: var(--bs-danger) !important;
  color: white !important;
}

.col-md-3 .card.stats-card.bg-secondary {
  background-color: var(--bs-secondary) !important;
  color: white !important;
}

/* Light background keeps dark text for contrast. */
.col-md-3 .card.stats-card.bg-light {
  background-color: var(--bs-light) !important;
  color: var(--bs-dark) !important;
}
|
||||
|
||||
/* Stats Cards Alignment Fix (Bottom Align, No Overlap) */
|
||||
.stats-row {
|
||||
display: flex;
|
||||
flex-wrap: wrap;
|
||||
|
||||
@ -7,6 +7,122 @@
|
||||
*/
|
||||
|
||||
document.addEventListener("DOMContentLoaded", function () {
|
||||
// Set up Plotly default config based on theme
|
||||
/**
 * Rebuild the shared Plotly layout defaults to match the current theme.
 *
 * Reads data-bs-theme from <html> and publishes three globals consumed by
 * chart-building code elsewhere in this file:
 *   window.plotlyDefaultLayout - base layout (fonts, backgrounds, axes)
 *   window.plotlyBarConfig     - base layout + bar-chart gap settings
 *   window.plotlyPieConfig     - base layout + horizontal bottom legend
 */
function updatePlotlyTheme() {
  // Force a fresh check of the current theme
  const isDarkMode = document.documentElement.getAttribute("data-bs-theme") === "dark";
  console.log(
    "updatePlotlyTheme called - Current theme mode:",
    isDarkMode ? "dark" : "light",
  );

  window.plotlyDefaultLayout = {
    font: {
      color: isDarkMode ? "#f8f9fa" : "#212529",
      family: '-apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, "Helvetica Neue", Arial, sans-serif',
    },
    // Chart backgrounds track the card background of each theme.
    paper_bgcolor: isDarkMode ? "#343a40" : "#ffffff",
    plot_bgcolor: isDarkMode ? "#343a40" : "#ffffff",
    colorway: [
      "#4285F4",
      "#EA4335",
      "#FBBC05",
      "#34A853",
      "#FF6D00",
      "#46BDC6",
      "#DB4437",
      "#0F9D58",
      "#AB47BC",
      "#00ACC1",
    ],
    margin: {
      l: 50,
      r: 30,
      t: 30,
      b: 50,
      pad: 10,
    },
    hovermode: "closest",
    xaxis: {
      automargin: true,
      // Subtle grid/zero lines so they read on both backgrounds.
      gridcolor: isDarkMode ? "rgba(255,255,255,0.1)" : "rgba(0,0,0,0.1)",
      zerolinecolor: isDarkMode ? "rgba(255,255,255,0.2)" : "rgba(0,0,0,0.2)",
      title: {
        font: {
          color: isDarkMode ? "#f8f9fa" : "#212529",
        },
      },
      tickfont: {
        color: isDarkMode ? "#f8f9fa" : "#212529",
      },
    },
    yaxis: {
      automargin: true,
      gridcolor: isDarkMode ? "rgba(255,255,255,0.1)" : "rgba(0,0,0,0.1)",
      zerolinecolor: isDarkMode ? "rgba(255,255,255,0.2)" : "rgba(0,0,0,0.2)",
      title: {
        font: {
          color: isDarkMode ? "#f8f9fa" : "#212529",
        },
      },
      tickfont: {
        color: isDarkMode ? "#f8f9fa" : "#212529",
      },
    },
    legend: {
      font: {
        color: isDarkMode ? "#f8f9fa" : "#212529",
      },
      // Semi-transparent so the legend doesn't fully hide plot content.
      bgcolor: isDarkMode ? "rgba(52, 58, 64, 0.8)" : "rgba(255, 255, 255, 0.8)",
    },
    modebar: {
      bgcolor: isDarkMode ? "rgba(52, 58, 64, 0.8)" : "rgba(255, 255, 255, 0.8)",
      color: isDarkMode ? "#f8f9fa" : "#212529",
      activecolor: isDarkMode ? "#6ea8fe" : "#007bff",
    },
  };

  // Config for specific chart types
  window.plotlyBarConfig = {
    ...window.plotlyDefaultLayout,
    bargap: 0.1,
    bargroupgap: 0.2,
  };

  window.plotlyPieConfig = {
    ...window.plotlyDefaultLayout,
    showlegend: true,
    legend: {
      ...window.plotlyDefaultLayout.legend,
      // Horizontal legend centered below the pie.
      xanchor: "center",
      yanchor: "top",
      y: -0.2,
      x: 0.5,
      orientation: "h",
    },
  };
}
|
||||
|
||||
// Initialize theme setting
|
||||
updatePlotlyTheme();
|
||||
|
||||
// Listen for theme changes
|
||||
const observer = new MutationObserver(function (mutations) {
|
||||
mutations.forEach(function (mutation) {
|
||||
if (mutation.attributeName === "data-bs-theme") {
|
||||
console.log(
|
||||
"Theme changed detected by observer:",
|
||||
document.documentElement.getAttribute("data-bs-theme"),
|
||||
);
|
||||
updatePlotlyTheme();
|
||||
// Use a small delay to ensure styles have been applied
|
||||
setTimeout(refreshAllCharts, 100);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
observer.observe(document.documentElement, { attributes: true });
|
||||
|
||||
// Chart responsiveness
|
||||
function resizeCharts() {
|
||||
const charts = document.querySelectorAll(".chart-container");
|
||||
@ -20,6 +136,66 @@ document.addEventListener("DOMContentLoaded", function () {
|
||||
});
|
||||
}
|
||||
|
||||
// Refresh all charts with current theme
/**
 * Re-style every rendered Plotly chart to match the active theme.
 *
 * Regenerates the shared layout defaults, then relayouts each
 * .chart-container that already holds a plot (detected via the private
 * _fullLayout property Plotly sets after rendering). No-op when the
 * Plotly bundle is not loaded on the page.
 */
function refreshAllCharts() {
  if (!window.Plotly) return;

  const currentTheme = document.documentElement.getAttribute("data-bs-theme");
  console.log("Refreshing charts with theme:", currentTheme);

  // Update the theme settings
  updatePlotlyTheme();

  const charts = document.querySelectorAll(".chart-container");
  charts.forEach(function (chart) {
    if (chart.id) {
      try {
        // Safe way to check if element has a plot
        const plotElement = document.getElementById(chart.id);
        if (plotElement && plotElement._fullLayout) {
          console.log("Updating chart theme for:", chart.id);

          // Determine chart type to apply appropriate settings
          let layoutUpdate = { ...window.plotlyDefaultLayout };

          // Check if it's a bar chart
          if (
            plotElement.data &&
            plotElement.data.some((trace) => trace.type === "bar")
          ) {
            layoutUpdate = { ...window.plotlyBarConfig };
          }

          // Check if it's a pie chart
          if (
            plotElement.data &&
            plotElement.data.some((trace) => trace.type === "pie")
          ) {
            layoutUpdate = { ...window.plotlyPieConfig };
          }

          // Force paper and plot background colors based on current theme
          // This ensures the chart background always matches the current theme
          layoutUpdate.paper_bgcolor =
            currentTheme === "dark" ? "#343a40" : "#ffffff";
          layoutUpdate.plot_bgcolor = currentTheme === "dark" ? "#343a40" : "#ffffff";

          // Update font colors too
          layoutUpdate.font.color = currentTheme === "dark" ? "#f8f9fa" : "#212529";

          // Apply layout updates
          Plotly.relayout(chart.id, layoutUpdate);
        }
      } catch (e) {
        // A single bad chart must not stop the others from updating.
        console.error("Error updating chart theme:", e);
      }
    }
  });
}
|
||||
|
||||
// Make refreshAllCharts available globally
|
||||
window.refreshAllCharts = refreshAllCharts;
|
||||
|
||||
// Handle window resize
|
||||
window.addEventListener("resize", function () {
|
||||
if (window.Plotly) {
|
||||
@ -27,6 +203,29 @@ document.addEventListener("DOMContentLoaded", function () {
|
||||
}
|
||||
});
|
||||
|
||||
// Call resizeCharts on initial load
|
||||
if (window.Plotly) {
|
||||
// Use a longer delay to ensure charts are fully loaded
|
||||
setTimeout(function () {
|
||||
updatePlotlyTheme();
|
||||
refreshAllCharts();
|
||||
}, 300);
|
||||
}
|
||||
|
||||
// Apply theme to newly created charts by wrapping Plotly.newPlot.
// Guarded with window.Plotly (consistent with every other Plotly access
// in this file): the original dereferenced Plotly unconditionally, which
// throws a ReferenceError on pages that do not load the Plotly bundle.
if (window.Plotly) {
  const originalPlotlyNewPlot = Plotly.newPlot;
  Plotly.newPlot = function () {
    const args = Array.from(arguments);
    // Get the layout argument (3rd argument)
    if (args.length >= 3 && typeof args[2] === "object") {
      // Ensure plotlyDefaultLayout is up to date
      updatePlotlyTheme();
      // Apply current theme to new plot; caller-supplied keys win.
      args[2] = { ...window.plotlyDefaultLayout, ...args[2] };
    }
    return originalPlotlyNewPlot.apply(this, args);
  };
}
|
||||
|
||||
// Time range filtering
|
||||
const timeRangeDropdown = document.getElementById("timeRangeDropdown");
|
||||
if (timeRangeDropdown) {
|
||||
@ -157,11 +356,14 @@ document.addEventListener("DOMContentLoaded", function () {
|
||||
},
|
||||
],
|
||||
{
|
||||
...window.plotlyDefaultLayout,
|
||||
margin: { t: 10, r: 10, b: 40, l: 40 },
|
||||
xaxis: {
|
||||
...window.plotlyDefaultLayout.xaxis,
|
||||
title: "Date",
|
||||
},
|
||||
yaxis: {
|
||||
...window.plotlyDefaultLayout.yaxis,
|
||||
title: "Number of Sessions",
|
||||
},
|
||||
},
|
||||
@ -204,6 +406,7 @@ document.addEventListener("DOMContentLoaded", function () {
|
||||
},
|
||||
],
|
||||
{
|
||||
...window.plotlyDefaultLayout,
|
||||
margin: { t: 10, r: 10, b: 10, l: 10 },
|
||||
},
|
||||
);
|
||||
@ -229,8 +432,10 @@ document.addEventListener("DOMContentLoaded", function () {
|
||||
},
|
||||
],
|
||||
{
|
||||
...window.plotlyDefaultLayout,
|
||||
margin: { t: 10, r: 10, b: 40, l: 100 },
|
||||
xaxis: {
|
||||
...window.plotlyDefaultLayout.xaxis,
|
||||
title: "Number of Sessions",
|
||||
},
|
||||
},
|
||||
@ -255,6 +460,7 @@ document.addEventListener("DOMContentLoaded", function () {
|
||||
},
|
||||
],
|
||||
{
|
||||
...window.plotlyDefaultLayout,
|
||||
margin: { t: 10, r: 10, b: 10, l: 10 },
|
||||
},
|
||||
);
|
||||
|
||||
@ -148,5 +148,99 @@ document.addEventListener("DOMContentLoaded", function () {
|
||||
}
|
||||
}
|
||||
|
||||
window.addEventListener("resize", handleSidebarOnResize);
|
||||
window.addEventListener("resize", handleSidebarOnResize); // Theme toggling functionality
|
||||
/**
 * Apply a theme and persist the choice.
 *
 * @param {string} theme - "dark" or "light".
 * @param {boolean} [isUserPreference=false] - true when the change came
 *   from the toggle button; recorded so system-preference changes no
 *   longer override the user's explicit choice.
 */
function setTheme(theme, isUserPreference = false) {
  console.log("Setting theme to:", theme, "User preference:", isUserPreference);

  // Update the HTML attribute that controls theme
  document.documentElement.setAttribute("data-bs-theme", theme);

  // Save the theme preference to localStorage
  localStorage.setItem("theme", theme);

  // If this was a user choice (from the toggle button), record that fact
  if (isUserPreference) {
    localStorage.setItem("userPreferredTheme", "true");
  }

  // Update toggle button icon (sun in dark mode, moon in light mode),
  // plus its tooltip/aria-label for accessibility.
  const themeToggle = document.getElementById("theme-toggle");
  if (themeToggle) {
    const icon = themeToggle.querySelector("i");
    if (theme === "dark") {
      icon.classList.remove("fa-moon");
      icon.classList.add("fa-sun");
      themeToggle.setAttribute("title", "Switch to light mode");
      themeToggle.setAttribute("aria-label", "Switch to light mode");
    } else {
      icon.classList.remove("fa-sun");
      icon.classList.add("fa-moon");
      themeToggle.setAttribute("title", "Switch to dark mode");
      themeToggle.setAttribute("aria-label", "Switch to dark mode");
    }
  }

  // If we're on a page with charts, refresh them to match the theme
  if (typeof window.refreshAllCharts === "function") {
    console.log("Calling refresh charts from theme toggle");
    // Add a small delay to ensure DOM updates have completed
    setTimeout(window.refreshAllCharts, 100);
  }
}
|
||||
|
||||
// Report the operating-system color-scheme preference.
// Returns "dark" when the OS prefers dark mode, "light" otherwise.
function getSystemPreference() {
  const prefersDark = window.matchMedia("(prefers-color-scheme: dark)");
  if (prefersDark.matches) {
    return "dark";
  }
  return "light";
}
|
||||
|
||||
// Initialize theme based on saved preference or system setting
/**
 * Pick the startup theme: a user-chosen theme from localStorage wins;
 * otherwise follow the OS preference and clear the stale user-choice
 * flag so future system changes keep being honored.
 */
function initializeTheme() {
  // Check if the user has explicitly set a preference
  const hasUserPreference = localStorage.getItem("userPreferredTheme") === "true";
  const savedTheme = localStorage.getItem("theme");
  const systemTheme = getSystemPreference();

  console.log("Theme initialization:", {
    hasUserPreference,
    savedTheme,
    systemTheme,
  });

  // Use saved theme if it exists and was set by user
  // Otherwise, use system preference
  if (hasUserPreference && savedTheme) {
    setTheme(savedTheme);
  } else {
    // No user preference, use system preference
    setTheme(systemTheme);
    // Clear any saved theme to ensure it uses system preference
    localStorage.removeItem("userPreferredTheme");
  }
}
|
||||
|
||||
// Initialize theme on page load
|
||||
initializeTheme();
|
||||
|
||||
// Listen for system preference changes
|
||||
const colorSchemeMediaQuery = window.matchMedia("(prefers-color-scheme: dark)");
|
||||
colorSchemeMediaQuery.addEventListener("change", (e) => {
|
||||
// Only update theme based on system if user hasn't set a preference
|
||||
const hasUserPreference = localStorage.getItem("userPreferredTheme") === "true";
|
||||
console.log("System preference changed. Following system?", !hasUserPreference);
|
||||
|
||||
if (!hasUserPreference) {
|
||||
setTheme(e.matches ? "dark" : "light");
|
||||
}
|
||||
});
|
||||
|
||||
// Theme toggle button functionality
|
||||
const themeToggle = document.getElementById("theme-toggle");
|
||||
if (themeToggle) {
|
||||
themeToggle.addEventListener("click", function () {
|
||||
const currentTheme = document.documentElement.getAttribute("data-bs-theme") || "light";
|
||||
const newTheme = currentTheme === "dark" ? "light" : "dark";
|
||||
console.log("Manual theme toggle from", currentTheme, "to", newTheme);
|
||||
setTheme(newTheme, true); // true indicates this is a user preference
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
@ -71,6 +71,17 @@
|
||||
</ul>
|
||||
|
||||
<div class="d-flex">
|
||||
<!-- Theme Toggle Button -->
|
||||
<button
|
||||
id="theme-toggle"
|
||||
class="btn btn-outline-light me-2"
|
||||
type="button"
|
||||
aria-label="Toggle theme"
|
||||
title="Toggle light/dark mode"
|
||||
>
|
||||
<i class="fas fa-moon"></i>
|
||||
</button>
|
||||
|
||||
{% if user.is_authenticated %}
|
||||
<div class="dropdown">
|
||||
<button
|
||||
@ -116,7 +127,7 @@
|
||||
<!-- Sidebar -->
|
||||
<nav
|
||||
id="sidebarMenu"
|
||||
class="col-md-3 col-lg-2 d-md-block bg-light sidebar collapse sticky-top h-100 p-0"
|
||||
class="col-md-3 col-lg-2 d-md-block sidebar collapse sticky-top h-100 p-0"
|
||||
>
|
||||
<div class="sidebar-sticky pt-3">
|
||||
{% block sidebar %}
|
||||
|
||||
@ -62,6 +62,19 @@
|
||||
</li>
|
||||
</ul>
|
||||
</div>
|
||||
|
||||
{% if user.is_superuser %}
|
||||
<form
|
||||
method="post"
|
||||
action="{% url 'data_integration:manual_data_refresh' %}"
|
||||
class="ms-2 d-inline"
|
||||
>
|
||||
{% csrf_token %}
|
||||
<button type="submit" class="btn btn-sm btn-outline-info">
|
||||
<i class="fas fa-sync"></i> Refresh Data
|
||||
</button>
|
||||
</form>
|
||||
{% endif %}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
|
||||
@ -103,60 +103,86 @@
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Export to CSV -->
|
||||
<!-- Export to CSV - Collapsible Section -->
|
||||
<div class="row mb-4">
|
||||
<div class="col-12">
|
||||
<div class="card">
|
||||
<div class="card-header">
|
||||
<div class="card-header d-flex justify-content-between align-items-center">
|
||||
<h5 class="card-title mb-0">Export Data</h5>
|
||||
<button
|
||||
class="btn btn-sm btn-outline-secondary"
|
||||
type="button"
|
||||
data-bs-toggle="collapse"
|
||||
data-bs-target="#exportCollapse"
|
||||
aria-expanded="false"
|
||||
aria-controls="exportCollapse"
|
||||
>
|
||||
<i class="fas fa-chevron-down"></i>
|
||||
</button>
|
||||
</div>
|
||||
<div class="card-body">
|
||||
<form id="export-form" method="get" action="{% url 'export_chats_csv' %}" class="row g-3">
|
||||
<!-- Pass current filters to export -->
|
||||
<input type="hidden" name="data_source_id" value="{{ selected_data_source.id }}" />
|
||||
<input type="hidden" name="view" value="{{ view }}" />
|
||||
<div class="collapse" id="exportCollapse">
|
||||
<div class="card-body">
|
||||
<form
|
||||
id="export-form"
|
||||
method="get"
|
||||
action="{% url 'export_chats_csv' %}"
|
||||
class="row g-3"
|
||||
>
|
||||
<!-- Pass current filters to export -->
|
||||
<input type="hidden" name="data_source_id" value="{{ selected_data_source.id }}" />
|
||||
<input type="hidden" name="view" value="{{ view }}" />
|
||||
|
||||
<div class="col-md-3">
|
||||
<label for="start_date" class="form-label">Start Date</label>
|
||||
<input type="date" name="start_date" id="start_date" class="form-control" />
|
||||
</div>
|
||||
<div class="col-md-3">
|
||||
<label for="end_date" class="form-label">End Date</label>
|
||||
<input type="date" name="end_date" id="end_date" class="form-control" />
|
||||
</div>
|
||||
<div class="col-md-3">
|
||||
<label for="country" class="form-label">Country</label>
|
||||
<input
|
||||
type="text"
|
||||
name="country"
|
||||
id="country"
|
||||
class="form-control"
|
||||
placeholder="Country"
|
||||
/>
|
||||
</div>
|
||||
<div class="col-md-3">
|
||||
<label for="sentiment" class="form-label">Sentiment</label>
|
||||
<select name="sentiment" id="sentiment" class="form-select">
|
||||
<option value="">All</option>
|
||||
<option value="positive">Positive</option>
|
||||
<option value="negative">Negative</option>
|
||||
<option value="neutral">Neutral</option>
|
||||
</select>
|
||||
</div>
|
||||
<div class="col-md-3">
|
||||
<label for="escalated" class="form-label">Escalated</label>
|
||||
<select name="escalated" id="escalated" class="form-select">
|
||||
<option value="">All</option>
|
||||
<option value="true">Yes</option>
|
||||
<option value="false">No</option>
|
||||
</select>
|
||||
</div>
|
||||
<div class="col-md-3 d-flex align-items-end">
|
||||
<button type="submit" class="btn btn-success w-100">
|
||||
<i class="fas fa-file-csv me-1"></i> Export to CSV
|
||||
</button>
|
||||
</div>
|
||||
</form>
|
||||
<div class="col-md-3">
|
||||
<label for="start_date" class="form-label">Start Date</label>
|
||||
<input type="date" name="start_date" id="start_date" class="form-control" />
|
||||
</div>
|
||||
<div class="col-md-3">
|
||||
<label for="end_date" class="form-label">End Date</label>
|
||||
<input type="date" name="end_date" id="end_date" class="form-control" />
|
||||
</div>
|
||||
<div class="col-md-3">
|
||||
<label for="country" class="form-label">Country</label>
|
||||
<input
|
||||
type="text"
|
||||
name="country"
|
||||
id="country"
|
||||
class="form-control"
|
||||
placeholder="Country"
|
||||
/>
|
||||
</div>
|
||||
<div class="col-md-3">
|
||||
<label for="sentiment" class="form-label">Sentiment</label>
|
||||
<select name="sentiment" id="sentiment" class="form-select">
|
||||
<option value="">All</option>
|
||||
<option value="positive">Positive</option>
|
||||
<option value="negative">Negative</option>
|
||||
<option value="neutral">Neutral</option>
|
||||
</select>
|
||||
</div>
|
||||
<div class="col-md-3">
|
||||
<label for="escalated" class="form-label">Escalated</label>
|
||||
<select name="escalated" id="escalated" class="form-select">
|
||||
<option value="">All</option>
|
||||
<option value="true">Yes</option>
|
||||
<option value="false">No</option>
|
||||
</select>
|
||||
</div>
|
||||
<div class="col-md-3 d-flex align-items-end">
|
||||
<button type="submit" class="btn btn-success w-100">
|
||||
<i class="fas fa-file-csv me-1"></i> Export to CSV
|
||||
</button>
|
||||
</div>
|
||||
<div class="col-md-3 d-flex align-items-end">
|
||||
<button
|
||||
type="submit"
|
||||
class="btn btn-info w-100"
|
||||
formaction="{% url 'export_chats_json' %}"
|
||||
>
|
||||
<i class="fas fa-file-code me-1"></i> Export to JSON
|
||||
</button>
|
||||
</div>
|
||||
</form>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
Reference in New Issue
Block a user