Implement data integration tasks with Celery, including periodic fetching and manual refresh of chat data; add utility functions for data processing and transcript handling; create views and URLs for manual data refresh; establish Redis and Celery configuration; enhance error handling and logging; introduce scripts for data cleanup and fixing dashboard data; update documentation for Redis and Celery setup and troubleshooting.

commit 8bbbb109bd (parent e8f2d2adc2)
2025-05-18 13:33:11 +00:00

63 changed files with 4601 additions and 164 deletions
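
The tasks module itself (data_integration/tasks.py) is not reproduced among the files below, so the following is only a minimal sketch of the shape these tasks plausibly take, inferred from names that do appear in this diff (periodic_fetch_chat_data in CELERY_BEAT_SCHEDULE, refresh_specific_source imported by the admin, fetch_and_store_chat_data imported by the fetch command). The retry parameters and the source_id keyword are illustrative assumptions, not the commit's actual code.

```python
# Hypothetical sketch of data_integration/tasks.py; names taken from this
# commit, bodies and signatures assumed.
from celery import shared_task

from data_integration.utils import fetch_and_store_chat_data


@shared_task(bind=True, max_retries=3, default_retry_delay=60)
def periodic_fetch_chat_data(self):
    """Run by Celery Beat (see CELERY_BEAT_SCHEDULE in settings.py)."""
    try:
        fetch_and_store_chat_data()
    except Exception as exc:
        # Back off and retry instead of silently dropping the fetch.
        raise self.retry(exc=exc)


@shared_task
def refresh_specific_source(source_id):
    """Manual refresh of one source, triggered from the admin's Refresh Now button."""
    # source_id keyword assumed; the admin only shows that the task takes the id.
    fetch_and_store_chat_data(source_id=source_id)
```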

devcontainer.json (new file)

@@ -0,0 +1,145 @@
// For format details, see https://aka.ms/devcontainer.json. For config options, see the
// README at: https://github.com/devcontainers/templates/tree/main/src/ubuntu
{
"name": "Ubuntu",
// Or use a Dockerfile or Docker Compose file. More info: https://containers.dev/guide/dockerfile
"image": "mcr.microsoft.com/devcontainers/base:jammy",
// Features to add to the dev container. More info: https://containers.dev/features.
"features": {
"ghcr.io/devcontainers-community/npm-features/prettier:1": {
"version": "latest"
},
"ghcr.io/devcontainers-extra/features/gitmux:1": {
"version": "latest"
},
"ghcr.io/devcontainers-extra/features/pre-commit:2": {
"version": "latest"
},
"ghcr.io/devcontainers-extra/features/ruff:1": {
"version": "latest"
},
"ghcr.io/devcontainers-extra/features/shfmt:1": {
"version": "latest"
},
"ghcr.io/devcontainers-extra/features/tmux-apt-get:1": {},
"ghcr.io/devcontainers/features/common-utils:2": {},
"ghcr.io/devcontainers/features/docker-in-docker:2": {},
"ghcr.io/devcontainers/features/git:1": {},
"ghcr.io/devcontainers/features/github-cli:1": {
"installDirectlyFromGitHubRelease": true,
"version": "latest"
},
"ghcr.io/devcontainers/features/go:1": {},
"ghcr.io/devcontainers/features/node:1": {
"installYarnUsingApt": true,
"nodeGypDependencies": true,
"nvmVersion": "latest",
"pnpmVersion": "latest",
"version": "latest"
},
"ghcr.io/devcontainers/features/powershell:1": {
"version": "latest"
},
"ghcr.io/devcontainers/features/python:1": {
"enableShared": true,
"installJupyterlab": true,
"installTools": true,
"version": "latest"
},
"ghcr.io/devcontainers/features/sshd:1": {
"version": "latest"
},
"ghcr.io/hspaans/devcontainer-features/django-upgrade:1": {
"version": "latest"
},
"ghcr.io/itsmechlark/features/redis-server:1": {
"version": "latest"
},
"ghcr.io/jsburckhardt/devcontainer-features/uv:1": {},
"ghcr.io/warrenbuckley/codespace-features/sqlite:1": {}
},
"customizations": {
"vscode": {
"extensions": [
"charliermarsh.ruff",
"esbenp.prettier-vscode",
"GitHub.copilot-chat",
"GitHub.copilot-workspace",
"ms-vscode.copilot-mermaid-diagram",
"ms-vscode.vscode-copilot-data-analysis",
"ms-vscode.vscode-copilot-vision",
"ms-vscode.vscode-websearchforcopilot",
"PyCQA.bandit-pycqa",
"tamasfe.even-better-toml",
"timonwong.shellcheck",
"trunk.io"
],
"settings": {
"[css]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.formatOnSave": true
},
"[html]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.formatOnSave": true
},
"[javascript]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.formatOnSave": true
},
"[python]": {
"editor.codeActionsOnSave": {
"source.fixAll": "explicit",
"source.organizeImports": "explicit"
},
"editor.defaultFormatter": "charliermarsh.ruff",
"editor.formatOnSave": true
},
"[toml]": {
"editor.defaultFormatter": "tamasfe.even-better-toml"
},
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.formatOnSave": true,
"emmet.includeLanguages": {
"django-html": "html",
"jinja-html": "html"
},
"emmet.syntaxProfiles": {
"html": {
"inline_break": 2
}
},
"files.associations": {
"*.html": "html"
},
"html.format.wrapAttributes": "auto",
"html.format.wrapLineLength": 100,
"notebook.codeActionsOnSave": {
"notebook.source.fixAll": "explicit",
"notebook.source.organizeImports": "explicit"
},
"notebook.formatOnSave.enabled": true,
"prettier.requireConfig": true,
"python.defaultInterpreterPath": "${workspaceFolder}/.venv/bin/python",
"json.schemas": [
{
"fileMatch": ["*/devcontainer-feature.json"],
"url": "https://raw.githubusercontent.com/devcontainers/spec/main/schemas/devContainerFeature.schema.json"
},
{
"fileMatch": ["*/devcontainer.json"],
"url": "https://raw.githubusercontent.com/devcontainers/spec/main/schemas/devContainer.schema.json"
}
]
}
}
},
// Use 'forwardPorts' to make a list of ports inside the container available locally.
"forwardPorts": [6379, 8001],
// Use 'postCreateCommand' to run commands after the container is created.
"postCreateCommand": "sudo apt update && sudo apt full-upgrade -y && sudo apt autoremove -y; sudo apt install -y ruby-foreman; npm i -g prettier prettier-plugin-jinja-template; redis-server --daemonize yes; uname -a; export UV_LINK_MODE=copy; uv python install; uv pip install -Ur pyproject.toml"
// Configure tool-specific properties.
// "customizations": {},
// Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root.
// "remoteUser": "root"
}

.env.sample (new file)

@@ -0,0 +1,16 @@
# .env.sample - rename to .env and update with actual credentials
# Django settings
DJANGO_SECRET_KEY=your-secure-secret-key
DJANGO_DEBUG=True
# External API credentials
EXTERNAL_API_USERNAME=your-api-username
EXTERNAL_API_PASSWORD=your-api-password
# Redis settings for Celery
REDIS_URL=redis://localhost:6379/0
CELERY_BROKER_URL=redis://localhost:6379/0
CELERY_RESULT_BACKEND=redis://localhost:6379/0
# Celery Task Schedule (in seconds)
CHAT_DATA_FETCH_INTERVAL=3600

.github/dependabot.yml (new file, vendored)

@@ -0,0 +1,22 @@
# To get started with Dependabot version updates, you'll need to specify which
# package ecosystems to update and where the package manifests are located.
# Please see the documentation for more information:
# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates
# https://containers.dev/guide/dependabot
version: 2
updates:
- package-ecosystem: devcontainers
directory: /
schedule:
interval: weekly
day: tuesday
time: 03:00
timezone: Europe/Amsterdam
- package-ecosystem: uv
directory: /
schedule:
interval: weekly
day: tuesday
time: 03:00
timezone: Europe/Amsterdam

.gitignore (vendored)

@@ -406,7 +406,7 @@ pyrightconfig.json
# Local environment variables
*Zone.Identifier
examples/
-**/migrations/[0-9]**.py
+# **/migrations/[0-9]**.py
package-lock.json
# UV specific
@@ -417,3 +417,7 @@ package-lock.json
.vscode/
.idea/
.pyright/
# Local database files
*.rdb
*.sqlite

.pre-commit-config.yaml

@@ -5,6 +5,11 @@ default_install_hook_types:
- post-rewrite
repos:
- repo: https://github.com/adamchainz/django-upgrade
rev: 1.25.0
hooks:
- id: django-upgrade
# uv hooks for dependency management
- repo: https://github.com/astral-sh/uv-pre-commit
rev: 0.7.5
@@ -18,16 +23,21 @@
- id: trailing-whitespace
- id: end-of-file-fixer
- id: check-yaml
-      - id: check-json
+      # - id: check-json
- id: check-toml
- id: check-added-large-files
-        args: ["--maxkb=500"]
+        args: [--maxkb=1000]
- id: detect-private-key
- id: check-merge-conflict
- id: check-case-conflict
- id: debug-statements
- id: mixed-line-ending
-        args: ["--fix=lf"]
+        args: [--fix=lf]
# - repo: https://github.com/psf/black
# rev: 22.10.0
# hooks:
# - id: black
# # HTML/Django template linting
# - repo: https://github.com/rtts/djhtml
@@ -78,7 +88,7 @@
rev: 1.8.3
hooks:
- id: bandit
-        args: ["-c", "pyproject.toml", "-r", "dashboard_project"]
+        args: [-c, pyproject.toml, -r, dashboard_project]
additional_dependencies: ["bandit[toml]"]
# # Type checking

Makefile

@@ -1,4 +1,4 @@
-.PHONY: venv install install-dev lint test format clean run migrate makemigrations superuser setup-node
+.PHONY: venv install install-dev lint test format clean run migrate makemigrations superuser setup-node celery celery-beat docker-build docker-up docker-down reset-db setup-dev procfile
# Create a virtual environment
venv:
@@ -51,6 +51,14 @@ clean:
run:
cd dashboard_project && uv run python manage.py runserver 8001
# Run Celery worker for background tasks
celery:
cd dashboard_project && uv run celery -A dashboard_project worker --loglevel=info
# Run Celery Beat for scheduled tasks
celery-beat:
cd dashboard_project && uv run celery -A dashboard_project beat --scheduler django_celery_beat.schedulers:DatabaseScheduler
# Apply migrations
migrate:
cd dashboard_project && uv run python manage.py migrate
@@ -69,9 +77,51 @@ lock:
# Setup pre-commit hooks
setup-pre-commit:
uv pip install pre-commit
pre-commit install
# Run pre-commit on all files
lint-all:
pre-commit run --all-files
# Docker commands
docker-build:
docker-compose build
docker-up:
docker-compose up -d
docker-down:
docker-compose down
# Initialize or reset the database in development
reset-db:
cd dashboard_project && uv run python manage.py flush --no-input
cd dashboard_project && uv run python manage.py migrate
# Start a Redis server in development (if not installed, fallback to SQLite)
run-redis:
redis-server || echo "Redis not installed, using SQLite fallback"
# Start all development services (web, redis, celery, celery-beat)
run-all:
make run-redis & \
make run & \
make celery & \
make celery-beat
# Test Celery task
test-celery:
cd dashboard_project && uv run python manage.py test_celery
# Initialize data integration
init-data-integration:
	cd dashboard_project && uv run python manage.py create_default_datasource
cd dashboard_project && uv run python manage.py test_celery
# Setup development environment
setup-dev: venv install-dev migrate init-data-integration
@echo "Development environment setup complete"
procfile:
foreman start

Procfile (new file, executable)

@@ -0,0 +1,3 @@
server: cd dashboard_project && CELERY_BROKER_URL=redis://localhost:6379/0 CELERY_RESULT_BACKEND=redis://localhost:6379/0 UV_LINK_MODE=copy uv run python manage.py runserver 8001
celery: cd dashboard_project && CELERY_BROKER_URL=redis://localhost:6379/0 CELERY_RESULT_BACKEND=redis://localhost:6379/0 UV_LINK_MODE=copy uv run celery -A dashboard_project worker --loglevel=info
celery-beat: cd dashboard_project && CELERY_BROKER_URL=redis://localhost:6379/0 CELERY_RESULT_BACKEND=redis://localhost:6379/0 UV_LINK_MODE=copy uv run celery -A dashboard_project beat --scheduler django_celery_beat.schedulers:DatabaseScheduler
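
These three entries mirror the Makefile's run, celery, and celery-beat targets; with foreman available (the dev container's postCreateCommand installs ruby-foreman), `make procfile` or a plain `foreman start` launches all three processes together.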

README.md

@@ -74,13 +74,57 @@ A Django application that creates an analytics dashboard for chat session data.
python manage.py createsuperuser
```
-7. Run the development server:
+7. Set up environment variables:
```sh
# Copy the sample .env file
cp .env.sample .env
# Edit the .env file with your credentials
nano .env
```
Be sure to update:
- `EXTERNAL_API_USERNAME` and `EXTERNAL_API_PASSWORD` for the data integration API
- `DJANGO_SECRET_KEY` for production environments
- Redis URL if using a different configuration for Celery
8. Start Celery for background tasks:
```sh
# In a separate terminal
cd dashboard_project
celery -A dashboard_project worker --loglevel=info
# Start the Celery Beat scheduler in another terminal
cd dashboard_project
celery -A dashboard_project beat --scheduler django_celery_beat.schedulers:DatabaseScheduler
```
Alternative without Redis (using SQLite):
```sh
# Set environment variables to use SQLite instead of Redis
export CELERY_BROKER_URL=sqla+sqlite:///celery.sqlite
export CELERY_RESULT_BACKEND=db+sqlite:///results.sqlite
# In a separate terminal
cd dashboard_project
celery -A dashboard_project worker --loglevel=info
# Start the Celery Beat scheduler in another terminal with the same env vars
cd dashboard_project
celery -A dashboard_project beat --scheduler django_celery_beat.schedulers:DatabaseScheduler
```
9. Run the development server:
```sh
python manage.py runserver
```
-8. Access the application at <http://127.0.0.1:8000/>
+10. Access the application at <http://127.0.0.1:8000/>
### Development Workflow with UV

TODO.md

@@ -8,22 +8,32 @@
### Theming
-- [ ] Add dark mode/light mode toggle
+- [x] Add dark mode/light mode toggle
- [x] Fix dark mode implementation issues:
- [x] Make charts display properly in dark mode
- [x] Fix the footer not changing color in dark mode
- [x] Adjust the sidebar nav-link styling for dark mode
- [x] Make the navbar have a different background color from the body in dark mode
- [x] Make theme toggle automatically detect and respect the user's system preference
- [x] Fix inconsistency between system dark mode preference and manual toggle
- [x] Ensure charts properly update in both scenarios (system preference and manual toggle)
- [x] Implement smooth theme transitions
- [ ] Add Notso AI branding elements
- [ ] Implement responsive table design (reduce rows to fit screen)
### Data Export
-- [ ] Implement multi-format export functionality
-  - [ ] CSV format
+- [x] Implement multi-format export functionality
+  - [x] CSV format
  - [ ] Excel format
-  - [ ] JSON format
+  - [x] JSON format
- [ ] XML format
- [ ] HTML format
- [ ] PDF format
- [ ] Create dropdown menu for export options
-- [ ] Make export data section collapsible (folded by default)
-- [ ] Add company name, date and timestamp to exported filenames
+- [x] Make export data section collapsible (folded by default)
+- [x] Add company name, date and timestamp to exported filenames
- [ ] Update [data view](dashboard_project/templates/dashboard/partials/data_table.html) to show maximum 10 rows by default, with a "Show more" button to expand to 50 rows, or "Show all" to display all rows
## Admin Interface Enhancements
@@ -39,10 +49,20 @@
### External Data Sources
- [ ] Implement periodic data download from external API
-  - [ ] Source: <https://proto.notso.ai/XY/chats>
-  - [ ] Authentication: Basic Auth
-  - [ ] Credentials: [stored securely]
+  - Source: <https://proto.notso.ai/jumbo/chats>
+  - Authentication: Basic Auth
+  - Credentials: [stored securely]
- An example of the data structure can be found in [jumbo.csv](examples/jumbo.csv)
  - The endpoint returns a CSV file, but it is not a standard CSV file; its structure and format differ:
    - The header row is missing; it should be `session_id,start_time,end_time,ip_address,country,language,messages_sent,sentiment,escalated,forwarded_hr,full_transcript,avg_response_time,tokens,tokens_eur,category,initial_msg,user_rating` (see the parsing sketch below)
- [ ] The coupling of endpoint to company, and the authentication method, should be handled in the backend and be changeable by the superuser.
- [ ] The data should be stored in the database and the dashboard should be updated with the new data.
- [ ] The CSV also contains a full_transcript column: a URI to a UTF-8-encoded txt file holding the raw transcript of the chat.
- [ ] The txt file should be downloaded, parsed and stored in the database.
  - An example of such a txt file can be found in [132f3a8c-3ba5-4d89-ae04-cd83f1bc5272.txt](examples/132f3a8c-3ba5-4d89-ae04-cd83f1bc5272.txt)
  - Note that the User and Assistant messages can be multiline and can contain HTML, which should be handled safely and, if safe, rendered in the frontend.
- [ ] Add scheduling options for data refresh
- [ ] Add UI button to trigger manual data refresh
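
Since data_integration/utils.py is not reproduced on this page, here is a minimal, hypothetical sketch of how the headerless CSV and a transcript could be parsed, assuming the requests library and the Basic Auth credentials named in .env.sample; the endpoint URL is the one listed above.

```python
# Hypothetical parsing sketch; the commit's real logic lives in
# data_integration/utils.py (fetch_and_store_chat_data), not shown here.
import csv
import io
import os

import requests

# The endpoint omits the header row, so the documented field names are
# supplied explicitly.
FIELDS = (
    "session_id,start_time,end_time,ip_address,country,language,messages_sent,"
    "sentiment,escalated,forwarded_hr,full_transcript,avg_response_time,"
    "tokens,tokens_eur,category,initial_msg,user_rating"
).split(",")

AUTH = (os.environ["EXTERNAL_API_USERNAME"], os.environ["EXTERNAL_API_PASSWORD"])


def fetch_rows(url="https://proto.notso.ai/jumbo/chats"):
    resp = requests.get(url, auth=AUTH, timeout=300)
    resp.raise_for_status()
    return list(csv.DictReader(io.StringIO(resp.text), fieldnames=FIELDS))


def fetch_transcript(row):
    # full_transcript is a URI to a UTF-8 text file with the raw chat;
    # User/Assistant turns can be multiline and may contain HTML, so the
    # result must be sanitized before rendering.
    resp = requests.get(row["full_transcript"], auth=AUTH, timeout=60)
    resp.raise_for_status()
    return resp.content.decode("utf-8")
```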
## Technical Debt

manage.py

@@ -31,7 +31,15 @@ def main():
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "dashboard_project.settings")
# For specific commands, insert the command name at the start of argv
-    if cmd_name in ["runserver", "migrate", "makemigrations", "collectstatic", "createsuperuser", "shell", "test"]:
if cmd_name in [
"runserver",
"migrate",
"makemigrations",
"collectstatic",
"createsuperuser",
"shell",
"test",
]:
sys.argv.insert(1, cmd_name)
# Execute the Django management command

accounts/admin.py

@@ -7,6 +7,7 @@ from .forms import CustomUserChangeForm, CustomUserCreationForm
from .models import Company, CustomUser
+@admin.register(CustomUser)
class CustomUserAdmin(UserAdmin):
add_form = CustomUserCreationForm
form = CustomUserChangeForm
@@ -63,15 +64,11 @@
obj.save()
+@admin.register(Company)
class CompanyAdmin(admin.ModelAdmin):
list_display = ("name", "created_at", "get_employee_count")
search_fields = ("name", "description")
+    @admin.display(description="Employees")
def get_employee_count(self, obj):
return obj.employees.count()
-    get_employee_count.short_description = "Employees"
-admin.site.register(CustomUser, CustomUserAdmin)
-admin.site.register(Company, CompanyAdmin)

accounts/migrations/0001_initial.py (new file)

@@ -0,0 +1,151 @@
# Generated by Django 5.2.1 on 2025-05-16 21:18
import django.contrib.auth.models
import django.contrib.auth.validators
import django.db.models.deletion
import django.utils.timezone
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
("auth", "0012_alter_user_first_name_max_length"),
]
operations = [
migrations.CreateModel(
name="Company",
fields=[
(
"id",
models.BigAutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("name", models.CharField(max_length=100)),
("description", models.TextField(blank=True)),
("created_at", models.DateTimeField(auto_now_add=True)),
],
options={
"verbose_name_plural": "Companies",
},
),
migrations.CreateModel(
name="CustomUser",
fields=[
(
"id",
models.BigAutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("password", models.CharField(max_length=128, verbose_name="password")),
(
"last_login",
models.DateTimeField(blank=True, null=True, verbose_name="last login"),
),
(
"is_superuser",
models.BooleanField(
default=False,
help_text="Designates that this user has all permissions without explicitly assigning them.",
verbose_name="superuser status",
),
),
(
"username",
models.CharField(
error_messages={"unique": "A user with that username already exists."},
help_text="Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.",
max_length=150,
unique=True,
validators=[django.contrib.auth.validators.UnicodeUsernameValidator()],
verbose_name="username",
),
),
(
"first_name",
models.CharField(blank=True, max_length=150, verbose_name="first name"),
),
(
"last_name",
models.CharField(blank=True, max_length=150, verbose_name="last name"),
),
(
"email",
models.EmailField(blank=True, max_length=254, verbose_name="email address"),
),
(
"is_staff",
models.BooleanField(
default=False,
help_text="Designates whether the user can log into this admin site.",
verbose_name="staff status",
),
),
(
"is_active",
models.BooleanField(
default=True,
help_text="Designates whether this user should be treated as active. "
"Unselect this instead of deleting accounts.",
verbose_name="active",
),
),
(
"date_joined",
models.DateTimeField(default=django.utils.timezone.now, verbose_name="date joined"),
),
("is_company_admin", models.BooleanField(default=False)),
(
"groups",
models.ManyToManyField(
blank=True,
help_text="The groups this user belongs to. A user will get all permissions "
"granted to each of their groups.",
related_name="user_set",
related_query_name="user",
to="auth.group",
verbose_name="groups",
),
),
(
"user_permissions",
models.ManyToManyField(
blank=True,
help_text="Specific permissions for this user.",
related_name="user_set",
related_query_name="user",
to="auth.permission",
verbose_name="user permissions",
),
),
(
"company",
models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.SET_NULL,
related_name="employees",
to="accounts.company",
),
),
],
options={
"verbose_name": "user",
"verbose_name_plural": "users",
"abstract": False,
},
managers=[
("objects", django.contrib.auth.models.UserManager()),
],
),
]

dashboard/admin.py

@@ -5,18 +5,55 @@ from django.contrib import admin
from .models import ChatSession, Dashboard, DataSource
+@admin.register(DataSource)
class DataSourceAdmin(admin.ModelAdmin):
-    list_display = ("name", "company", "uploaded_at", "get_session_count")
list_display = (
"name",
"company",
"uploaded_at",
"get_external_source",
"get_session_count",
)
list_filter = ("company", "uploaded_at")
search_fields = ("name", "description", "company__name")
ordering = ("-uploaded_at",)
readonly_fields = ("get_external_data_status",)
fieldsets = (
(None, {"fields": ("name", "description", "company")}),
(
"Data Source",
{
"fields": ("file", "external_source"),
"description": "Either upload a file OR select an external data source. Not both.",
},
),
(
"Stats",
{
"fields": ("get_external_data_status",),
},
),
)
+    @admin.display(description="Sessions")
def get_session_count(self, obj):
return obj.chat_sessions.count()
-    get_session_count.short_description = "Sessions"
@admin.display(description="External Source")
def get_external_source(self, obj):
if obj.external_source:
return obj.external_source.name
return "None"
@admin.display(description="External Data Status")
def get_external_data_status(self, obj):
if obj.external_source:
return f"Last synced: {obj.external_source.last_synced or 'Never'} | Status: {obj.external_source.get_status()}"
return "No external data source linked"
+@admin.register(ChatSession)
class ChatSessionAdmin(admin.ModelAdmin):
list_display = (
"session_id",
@@ -45,21 +82,18 @@
)
ordering = ("-start_time",)
+    @admin.display(
+        description="Company",
+        ordering="data_source__company__name",
+    )
    def get_company(self, obj):
        return obj.data_source.company.name
-    get_company.short_description = "Company"
-    get_company.admin_order_field = "data_source__company__name"
+@admin.register(Dashboard)
class DashboardAdmin(admin.ModelAdmin):
list_display = ("name", "company", "created_at", "updated_at")
list_filter = ("company", "created_at")
search_fields = ("name", "description", "company__name")
filter_horizontal = ("data_sources",)
ordering = ("-updated_at",)
-admin.site.register(DataSource, DataSourceAdmin)
-admin.site.register(ChatSession, ChatSessionAdmin)
-admin.site.register(Dashboard, DashboardAdmin)

dashboard/apps.py

@@ -6,3 +6,7 @@ from django.apps import AppConfig
class DashboardConfig(AppConfig):
default_auto_field = "django.db.models.BigAutoField"
name = "dashboard"
    def ready(self):
        # Import signal handlers so the post_save receivers in dashboard/signals.py register
        from . import signals  # noqa: F401

dashboard/management/commands/create_test_data.py (new file)

@@ -0,0 +1,173 @@
# dashboard/management/commands/create_test_data.py
import csv
import os
from datetime import datetime, timedelta
from dashboard.models import DataSource
from data_integration.models import ChatMessage, ChatSession, ExternalDataSource
from django.core.management.base import BaseCommand
from django.utils.timezone import make_aware
class Command(BaseCommand):
help = "Create test data for external data source and link it to a dashboard data source"
def add_arguments(self, parser):
parser.add_argument(
"--company-id",
type=int,
help="Company ID to associate with the data source",
required=True,
)
parser.add_argument(
"--sample-file",
type=str,
help="Path to sample CSV file",
default="examples/sample.csv",
)
def handle(self, *args, **options): # noqa: ARG002
company_id = options["company_id"]
sample_file = options["sample_file"]
# Check if sample file exists
project_root = os.path.abspath(os.path.join(os.path.dirname(__file__), "../../../.."))
sample_path = os.path.join(project_root, sample_file)
if not os.path.exists(sample_path):
self.stdout.write(self.style.ERROR(f"Sample file not found: {sample_path}"))
return
# Create or get external data source
ext_source, created = ExternalDataSource.objects.get_or_create(
name="Test External Source",
defaults={
"api_url": "https://example.com/api",
"is_active": True,
"sync_interval": 3600,
"last_synced": make_aware(datetime.now()),
},
)
if created:
self.stdout.write(
self.style.SUCCESS(f"Created external data source: {ext_source.name} (ID: {ext_source.id})")
)
else:
self.stdout.write(f"Using existing external data source: {ext_source.name} (ID: {ext_source.id})")
# Create or get dashboard data source linked to external source
dash_source, created = DataSource.objects.get_or_create(
name="Test Dashboard Source",
company_id=company_id,
external_source=ext_source,
defaults={"description": "Test data source linked to external API"},
)
if created:
self.stdout.write(
self.style.SUCCESS(f"Created dashboard data source: {dash_source.name} (ID: {dash_source.id})")
)
else:
self.stdout.write(f"Using existing dashboard data source: {dash_source.name} (ID: {dash_source.id})")
# Import test data from CSV
session_count = 0
message_count = 0
# First clear any existing sessions
existing_count = ChatSession.objects.filter().count()
if existing_count > 0:
self.stdout.write(f"Clearing {existing_count} existing chat sessions")
ChatSession.objects.all().delete()
# Parse sample CSV
with open(sample_path, "r") as f:
reader = csv.reader(f)
header = next(reader) # Skip header
for row in reader:
# Make sure row has enough elements
padded_row = row + [""] * (len(header) - len(row))
# Create a dict from the row
data = dict(zip(header, padded_row, strict=False))
# Create a chat session
try:
# Parse dates
try:
start_time = make_aware(datetime.strptime(data.get("start_time", ""), "%d.%m.%Y %H:%M:%S"))
except ValueError:
start_time = make_aware(datetime.now() - timedelta(hours=1))
try:
end_time = make_aware(datetime.strptime(data.get("end_time", ""), "%d.%m.%Y %H:%M:%S"))
except ValueError:
end_time = make_aware(datetime.now())
# Convert values to appropriate types
escalated = data.get("escalated", "").lower() == "true"
forwarded_hr = data.get("forwarded_hr", "").lower() == "true"
messages_sent = int(data.get("messages_sent", 0) or 0)
tokens = int(data.get("tokens", 0) or 0)
tokens_eur = float(data.get("tokens_eur", 0) or 0)
user_rating = int(data.get("user_rating", 0) or 0) if data.get("user_rating", "") else None
# Create session
session = ChatSession.objects.create(
session_id=data.get("session_id", f"test-{session_count}"),
start_time=start_time,
end_time=end_time,
ip_address=data.get("ip_address", "127.0.0.1"),
country=data.get("country", ""),
language=data.get("language", ""),
messages_sent=messages_sent,
sentiment=data.get("sentiment", ""),
escalated=escalated,
forwarded_hr=forwarded_hr,
full_transcript_url=data.get("full_transcript", ""),
avg_response_time=float(data.get("avg_response_time", 0) or 0),
tokens=tokens,
tokens_eur=tokens_eur,
category=data.get("category", ""),
initial_msg=data.get("initial_msg", ""),
user_rating=user_rating,
)
session_count += 1
# Create messages for this session
if data.get("initial_msg"):
# User message
ChatMessage.objects.create(
session=session,
sender="User",
message=data.get("initial_msg", ""),
timestamp=start_time,
)
message_count += 1
# Assistant response
ChatMessage.objects.create(
session=session,
sender="Assistant",
message=f"This is a test response to {data.get('initial_msg', '')}",
timestamp=start_time + timedelta(seconds=30),
)
message_count += 1
except Exception as e:
self.stdout.write(self.style.ERROR(f"Error creating session: {e}"))
self.stdout.write(self.style.SUCCESS(f"Created {session_count} chat sessions with {message_count} messages"))
# Run the sync command to copy data to dashboard
self.stdout.write("Syncing data to dashboard...")
from django.core.management import call_command
call_command("sync_external_data", source_id=ext_source.id)
self.stdout.write(self.style.SUCCESS("Done! Your dashboard should now show test data."))

dashboard/management/commands/sync_external_data.py (new file)

@@ -0,0 +1,128 @@
# dashboard/management/commands/sync_external_data.py
import logging
from dashboard.models import ChatSession as DashboardChatSession
from dashboard.models import DataSource
from data_integration.models import ChatSession as ExternalChatSession
from django.core.management.base import BaseCommand
from django.db import transaction
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = "Synchronize data from external data sources to dashboard data sources"
def add_arguments(self, parser):
parser.add_argument(
"--source-id",
type=int,
help="Specific external data source ID to sync",
required=False,
)
parser.add_argument(
"--clear",
action="store_true",
help="Clear existing dashboard data before sync",
)
def handle(self, *args, **options): # noqa: ARG002
source_id = options.get("source_id")
clear_existing = options.get("clear", False)
# Get all datasources that have an external_source
if source_id:
data_sources = DataSource.objects.filter(external_source_id=source_id)
if not data_sources.exists():
self.stdout.write(
self.style.WARNING(f"No dashboard data sources linked to external source ID {source_id}")
)
return
else:
data_sources = DataSource.objects.exclude(external_source=None)
if not data_sources.exists():
self.stdout.write(self.style.WARNING("No dashboard data sources with external sources found"))
return
total_synced = 0
total_errors = 0
for data_source in data_sources:
self.stdout.write(f"Processing dashboard data source: {data_source.name} (ID: {data_source.id})")
if not data_source.external_source:
self.stdout.write(self.style.WARNING(f" - No external source linked to {data_source.name}"))
continue
# Get all external chat sessions for this source
external_sessions = ExternalChatSession.objects.all()
session_count = external_sessions.count()
if session_count == 0:
self.stdout.write(self.style.WARNING(" - No external sessions found"))
continue
self.stdout.write(f" - Found {session_count} external sessions")
# Clear existing data if requested
if clear_existing:
existing_count = DashboardChatSession.objects.filter(data_source=data_source).count()
if existing_count > 0:
self.stdout.write(f" - Clearing {existing_count} existing dashboard sessions")
DashboardChatSession.objects.filter(data_source=data_source).delete()
# Process each external session
synced_count = 0
error_count = 0
for ext_session in external_sessions:
try:
with transaction.atomic():
# Create or update dashboard chat session
(
dashboard_session,
created,
) = DashboardChatSession.objects.update_or_create(
data_source=data_source,
session_id=ext_session.session_id,
defaults={
"start_time": ext_session.start_time,
"end_time": ext_session.end_time,
"ip_address": ext_session.ip_address,
"country": ext_session.country or "",
"language": ext_session.language or "",
"messages_sent": ext_session.messages_sent or 0,
"sentiment": ext_session.sentiment or "",
"escalated": ext_session.escalated or False,
"forwarded_hr": ext_session.forwarded_hr or False,
"full_transcript": ext_session.full_transcript_url or "",
"avg_response_time": ext_session.avg_response_time,
"tokens": ext_session.tokens or 0,
"tokens_eur": ext_session.tokens_eur,
"category": ext_session.category or "",
"initial_msg": ext_session.initial_msg or "",
"user_rating": (
str(ext_session.user_rating) if ext_session.user_rating is not None else ""
),
},
)
synced_count += 1
action = "Created" if created else "Updated"
self.stdout.write(f" - {action} session: {dashboard_session.session_id}")
except Exception as e:
self.stdout.write(self.style.ERROR(f" - Error syncing session {ext_session.session_id}: {str(e)}"))
logger.error(
f"Error syncing session {ext_session.session_id}: {e}",
exc_info=True,
)
error_count += 1
self.stdout.write(self.style.SUCCESS(f" - Synced {synced_count} sessions with {error_count} errors"))
total_synced += synced_count
total_errors += error_count
self.stdout.write(
self.style.SUCCESS(f"Sync complete. Total: {total_synced} sessions synced, {total_errors} errors")
)

dashboard/migrations/0001_initial.py (new file)

@@ -0,0 +1,110 @@
# Generated by Django 5.2.1 on 2025-05-16 21:25
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
("accounts", "0001_initial"),
]
operations = [
migrations.CreateModel(
name="DataSource",
fields=[
(
"id",
models.BigAutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("name", models.CharField(max_length=255)),
("description", models.TextField(blank=True)),
("file", models.FileField(upload_to="data_sources/")),
("uploaded_at", models.DateTimeField(auto_now_add=True)),
(
"company",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="data_sources",
to="accounts.company",
),
),
],
),
migrations.CreateModel(
name="Dashboard",
fields=[
(
"id",
models.BigAutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("name", models.CharField(max_length=255)),
("description", models.TextField(blank=True)),
("created_at", models.DateTimeField(auto_now_add=True)),
("updated_at", models.DateTimeField(auto_now=True)),
(
"company",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="dashboards",
to="accounts.company",
),
),
(
"data_sources",
models.ManyToManyField(related_name="dashboards", to="dashboard.datasource"),
),
],
),
migrations.CreateModel(
name="ChatSession",
fields=[
(
"id",
models.BigAutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("session_id", models.CharField(max_length=255)),
("start_time", models.DateTimeField(blank=True, null=True)),
("end_time", models.DateTimeField(blank=True, null=True)),
("ip_address", models.GenericIPAddressField(blank=True, null=True)),
("country", models.CharField(blank=True, max_length=100)),
("language", models.CharField(blank=True, max_length=50)),
("messages_sent", models.IntegerField(default=0)),
("sentiment", models.CharField(blank=True, max_length=50)),
("escalated", models.BooleanField(default=False)),
("forwarded_hr", models.BooleanField(default=False)),
("full_transcript", models.TextField(blank=True)),
("avg_response_time", models.FloatField(blank=True, null=True)),
("tokens", models.IntegerField(default=0)),
("tokens_eur", models.FloatField(blank=True, null=True)),
("category", models.CharField(blank=True, max_length=100)),
("initial_msg", models.TextField(blank=True)),
("user_rating", models.CharField(blank=True, max_length=50)),
(
"data_source",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="chat_sessions",
to="dashboard.datasource",
),
),
],
),
]

dashboard/migrations/0002_datasource_external_source_alter_datasource_file.py (new file)

@@ -0,0 +1,35 @@
# Generated by Django 5.2.1 on 2025-05-17 23:10
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("dashboard", "0001_initial"),
("data_integration", "0002_externaldatasource_error_count_and_more"),
]
operations = [
migrations.AddField(
model_name="datasource",
name="external_source",
field=models.ForeignKey(
blank=True,
help_text="Link to an external data source",
null=True,
on_delete=django.db.models.deletion.SET_NULL,
to="data_integration.externaldatasource",
),
),
migrations.AlterField(
model_name="datasource",
name="file",
field=models.FileField(
blank=True,
help_text="Upload a CSV file or leave empty if using an external data source",
null=True,
upload_to="data_sources/",
),
),
]

dashboard/migrations/0003_… (new file)

@@ -0,0 +1,16 @@
# Generated by Django 5.2.1 on 2025-05-18 00:09
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("dashboard", "0002_datasource_external_source_alter_datasource_file"),
]
operations = [
migrations.AlterUniqueTogether(
name="chatsession",
unique_together={("session_id", "data_source")},
),
]

dashboard/models.py

@@ -5,11 +5,23 @@ from django.db import models
class DataSource(models.Model):
-    """Model for uploaded data sources (CSV files)"""
+    """Model for data sources (CSV files or external API data)"""
name = models.CharField(max_length=255)
description = models.TextField(blank=True)
-    file = models.FileField(upload_to="data_sources/")
file = models.FileField(
upload_to="data_sources/",
blank=True,
null=True,
help_text="Upload a CSV file or leave empty if using an external data source",
)
external_source = models.ForeignKey(
"data_integration.ExternalDataSource",
on_delete=models.SET_NULL,
blank=True,
null=True,
help_text="Link to an external data source",
)
uploaded_at = models.DateTimeField(auto_now_add=True)
company = models.ForeignKey(Company, on_delete=models.CASCADE, related_name="data_sources")
@@ -42,6 +54,9 @@ class ChatSession(models.Model):
def __str__(self):
return f"Session {self.session_id}"
class Meta:
unique_together = ("session_id", "data_source")
class Dashboard(models.Model):
"""Model for custom dashboards that can be created by users"""

dashboard/signals.py (new file)

@@ -0,0 +1,79 @@
# dashboard/signals.py
import logging
from dashboard.models import ChatSession as DashboardChatSession
from dashboard.models import DataSource
from data_integration.models import ChatSession as ExternalChatSession
from django.db.models.signals import post_save
from django.dispatch import receiver
logger = logging.getLogger(__name__)
@receiver(post_save, sender=ExternalChatSession)
def sync_external_session_to_dashboard(
sender, # noqa: ARG001
instance,
created,
**kwargs, # noqa: ARG001
):
"""
Signal handler to sync external chat sessions to dashboard chat sessions
whenever an external session is created or updated.
Args:
sender: The model class that sent the signal (unused but required by Django's signal interface)
instance: The ExternalChatSession instance that was saved
created: Boolean indicating if this is a new instance
**kwargs: Additional keyword arguments (unused but required by Django's signal interface)
"""
# Find all dashboard data sources that are linked to this external data source
# Since ExternalChatSession doesn't have a direct link to ExternalDataSource,
# we need to sync to all dashboard data sources with external sources
data_sources = DataSource.objects.exclude(external_source=None)
if not data_sources.exists():
logger.warning(f"No dashboard data sources with external sources found for session {instance.session_id}")
return
for data_source in data_sources:
try:
# Create or update dashboard chat session
dashboard_session, created = DashboardChatSession.objects.update_or_create(
data_source=data_source,
session_id=instance.session_id,
defaults={
"start_time": instance.start_time,
"end_time": instance.end_time,
"ip_address": instance.ip_address,
"country": instance.country or "",
"language": instance.language or "",
"messages_sent": instance.messages_sent or 0,
"sentiment": instance.sentiment or "",
"escalated": instance.escalated or False,
"forwarded_hr": instance.forwarded_hr or False,
"full_transcript": instance.full_transcript_url or "",
"avg_response_time": instance.avg_response_time,
"tokens": instance.tokens or 0,
"tokens_eur": instance.tokens_eur,
"category": instance.category or "",
"initial_msg": instance.initial_msg or "",
"user_rating": (str(instance.user_rating) if instance.user_rating is not None else ""),
},
)
if created:
logger.info(
f"Created dashboard session: {dashboard_session.session_id} for data source {data_source.name}"
)
else:
logger.info(
f"Updated dashboard session: {dashboard_session.session_id} for data source {data_source.name}"
)
except Exception as e:
logger.error(
f"Error syncing session {instance.session_id} to data source {data_source.name}: {e}",
exc_info=True,
)

dashboard/urls.py

@@ -42,4 +42,6 @@ urlpatterns = [
path("data-view/", views.data_view, name="data_view"),
# Export to CSV
path("export/csv/", views_export.export_chats_csv, name="export_chats_csv"),
# Export to JSON
path("export/json/", views_export.export_chats_json, name="export_chats_json"),
]

dashboard/views.py

@@ -200,7 +200,12 @@ def chat_session_detail_view(request, session_id):
# Check if this is an AJAX navigation request
if is_ajax_navigation(request):
html_content = render_to_string("dashboard/chat_session_detail.html", context, request=request)
-        return JsonResponse({"html": html_content, "title": f"Chat Session {session_id} | Chat Analytics"})
return JsonResponse(
{
"html": html_content,
"title": f"Chat Session {session_id} | Chat Analytics",
}
)
return render(request, "dashboard/chat_session_detail.html", context)
@@ -277,7 +282,12 @@ def edit_dashboard_view(request, dashboard_id):
# Check if this is an AJAX navigation request
if is_ajax_navigation(request):
html_content = render_to_string("dashboard/dashboard_form.html", context, request=request)
-        return JsonResponse({"html": html_content, "title": f"Edit Dashboard: {dashboard.name} | Chat Analytics"})
return JsonResponse(
{
"html": html_content,
"title": f"Edit Dashboard: {dashboard.name} | Chat Analytics",
}
)
return render(request, "dashboard/dashboard_form.html", context)

dashboard/views_export.py

@@ -1,6 +1,7 @@
# dashboard/views_export.py
import csv
import json
from datetime import timedelta
from django.contrib.auth.decorators import login_required
@@ -135,3 +136,115 @@ def export_chats_csv(request):
)
return response
@login_required
def export_chats_json(request):
"""Export chat sessions to JSON with filtering options"""
user = request.user
company = user.company
if not company:
return HttpResponse("You are not associated with any company.", status=403)
# Get and apply filters
data_source_id = request.GET.get("data_source_id")
dashboard_id = request.GET.get("dashboard_id")
view = request.GET.get("view", "all")
start_date = request.GET.get("start_date")
end_date = request.GET.get("end_date")
country = request.GET.get("country")
sentiment = request.GET.get("sentiment")
escalated = request.GET.get("escalated")
# Base queryset
sessions = ChatSession.objects.filter(data_source__company=company)
# Apply data source filter if selected
if data_source_id:
data_source = get_object_or_404(DataSource, id=data_source_id, company=company)
sessions = sessions.filter(data_source=data_source)
# Apply dashboard filter if selected
if dashboard_id:
dashboard = get_object_or_404(Dashboard, id=dashboard_id, company=company)
data_sources = dashboard.data_sources.all()
sessions = sessions.filter(data_source__in=data_sources)
# Apply view filter
if view == "recent":
seven_days_ago = timezone.now() - timedelta(days=7)
sessions = sessions.filter(start_time__gte=seven_days_ago)
elif view == "positive":
sessions = sessions.filter(Q(sentiment__icontains="positive"))
elif view == "negative":
sessions = sessions.filter(Q(sentiment__icontains="negative"))
elif view == "escalated":
sessions = sessions.filter(escalated=True)
# Apply additional filters
if start_date:
sessions = sessions.filter(start_time__date__gte=start_date)
if end_date:
sessions = sessions.filter(start_time__date__lte=end_date)
if country:
sessions = sessions.filter(country__icontains=country)
if sentiment:
sessions = sessions.filter(sentiment__icontains=sentiment)
if escalated:
escalated_val = escalated.lower() == "true"
sessions = sessions.filter(escalated=escalated_val)
# Order by most recent first
sessions = sessions.order_by("-start_time")
# Create the filename
filename = "chat_sessions"
if dashboard_id:
dashboard = Dashboard.objects.get(id=dashboard_id)
filename = f"{dashboard.name.replace(' ', '_').lower()}_chat_sessions"
elif data_source_id:
data_source = DataSource.objects.get(id=data_source_id)
filename = f"{data_source.name.replace(' ', '_').lower()}_chat_sessions"
# Prepare the data for JSON export using list comprehension
data = [
{
"session_id": session.session_id,
"start_time": (session.start_time.isoformat() if session.start_time else None),
"end_time": session.end_time.isoformat() if session.end_time else None,
"ip_address": session.ip_address,
"country": session.country,
"language": session.language,
"messages_sent": session.messages_sent,
"sentiment": session.sentiment,
"escalated": session.escalated,
"forwarded_hr": session.forwarded_hr,
"full_transcript": session.full_transcript,
"avg_response_time": session.avg_response_time,
"tokens": session.tokens,
"tokens_eur": session.tokens_eur,
"category": session.category,
"initial_msg": session.initial_msg,
"user_rating": session.user_rating,
}
for session in sessions
]
# Create the HttpResponse with JSON header
response = HttpResponse(content_type="application/json")
response["Content-Disposition"] = f'attachment; filename="{filename}.json"'
# Add company and timestamp to the exported JSON
current_time = timezone.now().isoformat()
export_data = {
"company": company.name,
"export_date": current_time,
"export_type": "chat_sessions",
"data": data,
}
# Write JSON data to the response
json.dump(export_data, response, indent=2)
return response

dashboard_project/__init__.py

@@ -1 +1,3 @@
# This file is intentionally left empty to mark the directory as a Python package
from .celery import app as celery_app
__all__ = ("celery_app",)

dashboard_project/celery.py (new file)

@@ -0,0 +1,22 @@
import os
from celery import Celery
# Set the default Django settings module for the 'celery' program.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "dashboard_project.settings")
app = Celery("dashboard_project")
# Using a string here means the worker doesn't have to serialize
# the configuration object to child processes.
# - namespace='CELERY' means all celery-related configuration keys
# should have a `CELERY_` prefix.
app.config_from_object("django.conf:settings", namespace="CELERY")
# Load task modules from all registered Django app configs.
app.autodiscover_tasks()
@app.task(bind=True)
def debug_task(self):
print(f"Request: {self.request!r}")

dashboard_project/settings.py

@@ -1,16 +1,27 @@
# dashboard_project/settings.py
import logging
import os
from pathlib import Path
from django.core.management.utils import get_random_secret_key
# Load environment variables from .env file if present
try:
from dotenv import load_dotenv
load_dotenv()
except ImportError:
pass
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# SECURITY WARNING: keep the secret key used in production secret!
-SECRET_KEY = "django-insecure-your-secret-key-here"  # nosec: B105
+SECRET_KEY = os.environ.get("DJANGO_SECRET_KEY", get_random_secret_key())
# SECURITY WARNING: don't run with debug turned on in production!
-DEBUG = True
+DEBUG = os.environ.get("DJANGO_DEBUG", "True") == "True"
ALLOWED_HOSTS = []
@@ -29,9 +40,11 @@
"allauth.socialaccount",
"crispy_forms",
"crispy_bootstrap5",
"django_celery_beat",
# Custom apps
"dashboard.apps.DashboardConfig",
"accounts.apps.AccountsConfig",
"data_integration",
]
MIDDLEWARE = [
@@ -91,7 +104,7 @@
]
# Internationalization
-LANGUAGE_CODE = "nl"
+LANGUAGE_CODE = "en-US"
TIME_ZONE = "Europe/Amsterdam"
USE_I18N = True
USE_TZ = True
@@ -102,7 +115,14 @@
os.path.join(BASE_DIR, "static"),
]
STATIC_ROOT = os.path.join(BASE_DIR, "staticfiles")
-STATICFILES_STORAGE = "whitenoise.storage.CompressedManifestStaticFilesStorage"
STORAGES = {
"default": {
"BACKEND": "django.core.files.storage.FileSystemStorage",
},
"staticfiles": {
"BACKEND": "whitenoise.storage.CompressedManifestStaticFilesStorage",
},
}
# Media files
MEDIA_URL = "/media/"
@@ -128,3 +148,50 @@
]
SITE_ID = 1
ACCOUNT_EMAIL_VERIFICATION = "none"
# Celery Configuration
# Check if Redis is available
try:
import redis
redis_client = redis.Redis(
host=os.environ.get("REDIS_HOST", "localhost"),
port=int(os.environ.get("REDIS_PORT", 6379)),
db=int(os.environ.get("REDIS_DB", 0)),
socket_connect_timeout=2, # 2 seconds timeout
)
redis_client.ping()
# Redis is available, use it
CELERY_BROKER_URL = os.environ.get("CELERY_BROKER_URL", "redis://localhost:6379/0")
CELERY_RESULT_BACKEND = os.environ.get("CELERY_RESULT_BACKEND", "redis://localhost:6379/0")
logger = logging.getLogger(__name__)
logger.info("Using Redis for Celery broker and result backend")
except Exception as e:
    # Intended to cover ImportError plus redis connection/timeout errors.
    # Naming redis.exceptions.* in the except tuple would itself raise
    # NameError when `import redis` fails, so catch broadly instead.
# Redis is not available, use SQLite as fallback (works for development)
CELERY_BROKER_URL = os.environ.get("CELERY_BROKER_URL", "sqla+sqlite:///celery.sqlite")
CELERY_RESULT_BACKEND = os.environ.get("CELERY_RESULT_BACKEND", "db+sqlite:///results.sqlite")
logger = logging.getLogger(__name__)
logger.warning(f"Redis connection failed: {str(e)}. Using SQLite for Celery.")
CELERY_ACCEPT_CONTENT = ["json"]
CELERY_TASK_SERIALIZER = "json"
CELERY_RESULT_SERIALIZER = "json"
CELERY_TIMEZONE = TIME_ZONE
CELERY_BEAT_SCHEDULER = "django_celery_beat.schedulers:DatabaseScheduler"
# Get schedule from environment variables or use defaults
CHAT_DATA_FETCH_INTERVAL = int(os.environ.get("CHAT_DATA_FETCH_INTERVAL", 3600)) # Default: 1 hour
CELERY_BEAT_SCHEDULE = {
"fetch_chat_data_periodic": {
"task": "data_integration.tasks.periodic_fetch_chat_data",
"schedule": CHAT_DATA_FETCH_INTERVAL,
"options": {
"expires": CHAT_DATA_FETCH_INTERVAL - 10, # 10 seconds before next run
},
},
}
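
With the default CHAT_DATA_FETCH_INTERVAL of 3600 seconds, the Beat scheduler enqueues data_integration.tasks.periodic_fetch_chat_data once an hour; the expires option lets a run that no worker has picked up lapse shortly before the next one is due, so fetches do not pile up behind a stalled worker.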

dashboard_project/urls.py

@@ -1,5 +1,6 @@
# dashboard_project/urls.py
from data_integration.views import refresh_specific_datasource
from django.conf import settings
from django.conf.urls.static import static
from django.contrib import admin
@@ -7,10 +8,23 @@ from django.urls import include, path
from django.views.generic import RedirectView
urlpatterns = [
# Additional admin URLs should come BEFORE admin.site.urls
path(
"admin/data_integration/externaldatasource/refresh/<int:source_id>/",
refresh_specific_datasource,
name="admin_refresh_datasource",
),
# Alternative URL pattern for direct access
path(
"admin/data_integration/refresh/<int:source_id>/",
refresh_specific_datasource,
name="admin_refresh_datasource_alt",
),
path("admin/", admin.site.urls),
path("accounts/", include("accounts.urls")),
path("dashboard/", include("dashboard.urls")),
path("", RedirectView.as_view(url="dashboard/", permanent=False)),
path("data/", include("data_integration.urls", namespace="data_integration")),
]
if settings.DEBUG:

data_integration/admin.py (new file)

@@ -0,0 +1,125 @@
from django.contrib import admin
from django.utils.html import format_html
from .models import ChatMessage, ChatSession, ExternalDataSource
from .tasks import refresh_specific_source
@admin.register(ExternalDataSource)
class ExternalDataSourceAdmin(admin.ModelAdmin):
list_display = (
"name",
"api_url",
"is_active",
"last_synced",
"status_badge",
"sync_interval",
"refresh_action",
)
list_filter = ("is_active",)
search_fields = ("name", "api_url")
readonly_fields = ("last_synced", "error_count", "last_error")
fieldsets = (
(None, {"fields": ("name", "api_url", "is_active")}),
(
"Authentication",
{
"fields": ("auth_username", "auth_password"),
"description": "Credentials can also be provided via environment variables.",
},
),
("Sync Settings", {"fields": ("sync_interval", "timeout")}),
("Status", {"fields": ("last_synced", "error_count", "last_error")}),
)
@admin.display(description="Status")
def status_badge(self, obj):
"""Display a colored status badge"""
status = obj.get_status()
if status == "Active":
return format_html(
'<span style="color: white; background-color: green; padding: 3px 8px; border-radius: 10px;">{}</span>',
status,
)
elif status == "Inactive":
return format_html(
'<span style="color: white; background-color: gray; padding: 3px 8px; border-radius: 10px;">{}</span>',
status,
)
elif "Error" in status:
return format_html(
'<span style="color: white; background-color: red; padding: 3px 8px; border-radius: 10px;">{}</span>',
status,
)
else:
return format_html(
'<span style="color: white; background-color: orange; padding: 3px 8px; border-radius: 10px;">{}</span>',
status,
)
@admin.display(description="Actions")
def refresh_action(self, obj):
"""Button to manually refresh a data source"""
if obj.is_active:
url = f"/admin/data_integration/externaldatasource/refresh/{obj.id}/"
return format_html('<a class="button" href="{}">Refresh Now</a>', url)
return "Inactive"
    def refresh_source(self, request, source_id):
        """Run a task to refresh the source data, then redirect back to the changelist"""
        from django.shortcuts import redirect

        task = refresh_specific_source.delay(source_id)
        self.message_user(request, f"Data refresh task started (Task ID: {task.id})")
        # An admin view must return an HttpResponse; redirect back to the list page.
        return redirect("admin:data_integration_externaldatasource_changelist")
def get_urls(self):
from django.urls import path
urls = super().get_urls()
custom_urls = [
path(
"refresh/<int:source_id>/",
self.admin_site.admin_view(self.refresh_source),
name="data_integration_externaldatasource_refresh",
),
]
return custom_urls + urls
@admin.register(ChatSession)
class ChatSessionAdmin(admin.ModelAdmin):
list_display = (
"session_id",
"start_time",
"end_time",
"country",
"language",
"messages_sent",
"sentiment",
)
list_filter = ("country", "language", "sentiment")
search_fields = ("session_id", "country", "ip_address")
readonly_fields = ("session_id",)
@admin.register(ChatMessage)
class ChatMessageAdmin(admin.ModelAdmin):
list_display = ("session", "sender", "timestamp", "message_preview")
list_filter = ("sender", "timestamp")
search_fields = ("message", "session__session_id")
readonly_fields = ("safe_html_display",)
@admin.display(description="Message")
def message_preview(self, obj):
"""Show a preview of the message"""
if len(obj.message) > 50:
return obj.message[:50] + "..."
return obj.message
@admin.display(description="Sanitized HTML Preview")
def safe_html_display(self, obj):
"""Display the sanitized HTML"""
if obj.safe_html_message:
return format_html(
'<div style="padding: 10px; border: 1px solid #ccc; background-color: #f9f9f9;">{}</div>',
obj.safe_html_message,
)
return "No HTML content"

data_integration/apps.py (new file)

@@ -0,0 +1,6 @@
from django.apps import AppConfig
class DataIntegrationConfig(AppConfig):
default_auto_field = "django.db.models.BigAutoField"
name = "data_integration"

data_integration/management/commands/create_default_datasource.py (new file)

@@ -0,0 +1,27 @@
import os

from data_integration.models import ExternalDataSource
from django.core.management.base import BaseCommand
class Command(BaseCommand):
help = "Create default external data source configuration"
def handle(self, *_args, **_options):
if not ExternalDataSource.objects.exists():
source = ExternalDataSource.objects.create( # nosec: B106
name="Notso AI Chat API",
api_url="https://HOST/COMPANY/chats",
auth_username="DEFAULT_USERNAME", # Will be set via environment variables
auth_password="DEFAULT_PASSWORD", # Will be set via environment variables
is_active=True,
sync_interval=int(self.get_env_var("CHAT_DATA_FETCH_INTERVAL", "3600")),
timeout=int(self.get_env_var("FETCH_DATA_TIMEOUT", "300")),
)
self.stdout.write(self.style.SUCCESS(f"Created default external data source: {source.name}"))
else:
self.stdout.write(self.style.SUCCESS("External data source already exists, no action taken."))
    def get_env_var(self, name, default):
        """Get environment variable or return default"""
        return os.environ.get(name, default)

data_integration/management/commands/… (new file, fetch-chat-data command)

@@ -0,0 +1,11 @@
from data_integration.utils import fetch_and_store_chat_data
from django.core.management.base import BaseCommand
class Command(BaseCommand):
help = "Fetches chat data from the external API and stores it in the database"
def handle(self, *_args, **_options): # Mark as unused
self.stdout.write(self.style.SUCCESS("Starting data fetch..."))
fetch_and_store_chat_data()
self.stdout.write(self.style.SUCCESS("Successfully fetched and stored chat data."))

data_integration/management/commands/… (new file, ExternalDataSource schema fix script)

@@ -0,0 +1,79 @@
#!/usr/bin/env python
"""
Migration Fix Script for ExternalDataSource
This management command adds the missing fields to ExternalDataSource
model directly using SQL, which is useful if Django migrations
are having issues.
"""
import logging
from django.core.management.base import BaseCommand
from django.db import connection
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = "Fix missing columns in ExternalDataSource table"
def handle(self, *args, **options): # noqa: ARG002
self.stdout.write("Checking ExternalDataSource schema...")
# Check if columns exist
with connection.cursor() as cursor:
cursor.execute("PRAGMA table_info(data_integration_externaldatasource)")
columns = [col[1] for col in cursor.fetchall()]
missing_columns = []
if "error_count" not in columns:
missing_columns.append("error_count")
if "last_error" not in columns:
missing_columns.append("last_error")
if "sync_interval" not in columns:
missing_columns.append("sync_interval")
if "timeout" not in columns:
missing_columns.append("timeout")
if not missing_columns:
self.stdout.write(self.style.SUCCESS("✅ All columns exist in ExternalDataSource table"))
return
self.stdout.write(f"Missing columns: {', '.join(missing_columns)}")
self.stdout.write("Adding missing columns...")
try:
# Add missing columns with SQLite
for col in missing_columns:
if col == "error_count":
cursor.execute(
"ALTER TABLE data_integration_externaldatasource ADD COLUMN error_count integer DEFAULT 0"
)
elif col == "last_error":
cursor.execute(
"ALTER TABLE data_integration_externaldatasource ADD COLUMN last_error varchar(255) NULL"
)
elif col == "sync_interval":
cursor.execute(
"ALTER TABLE data_integration_externaldatasource ADD COLUMN sync_interval integer DEFAULT 3600"
)
elif col == "timeout":
cursor.execute(
"ALTER TABLE data_integration_externaldatasource ADD COLUMN timeout integer DEFAULT 300"
)
self.stdout.write(
self.style.SUCCESS(f"✅ Successfully added missing columns: {', '.join(missing_columns)}")
)
# Verify columns were added
cursor.execute("PRAGMA table_info(data_integration_externaldatasource)")
updated_columns = [col[1] for col in cursor.fetchall()]
self.stdout.write(f"Current columns: {', '.join(updated_columns)}")
except Exception as e:
self.stdout.write(self.style.ERROR(f"❌ Error adding columns: {e}"))
self.stdout.write(self.style.WARNING("Consider running Django migrations instead:"))
self.stdout.write(" python manage.py makemigrations data_integration")
self.stdout.write(" python manage.py migrate data_integration")
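After running the fix, the schema can be re-checked from a Django shell with the same PRAGMA the command uses — a minimal sketch:

```python
# Sketch: verify the patched schema (same PRAGMA as the command above, SQLite only).
from django.db import connection

with connection.cursor() as cursor:
    cursor.execute("PRAGMA table_info(data_integration_externaldatasource)")
    columns = [col[1] for col in cursor.fetchall()]
print(columns)  # should now include error_count, last_error, sync_interval, timeout
```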

View File

@ -0,0 +1,47 @@
import logging
from celery.exceptions import TimeoutError as CeleryTimeoutError
from data_integration.tasks import test_task
from django.core.management.base import BaseCommand
from django.utils import timezone
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = "Test Celery configuration by executing a simple task"
def handle(self, *args, **options): # noqa: ARG002
self.stdout.write(f"Testing Celery configuration at {timezone.now()}")
try:
# Run the test task
self.stdout.write("Submitting test task to Celery...")
result = test_task.delay()
task_id = result.id
self.stdout.write(f"Task submitted with ID: {task_id}")
self.stdout.write("Waiting for task result (this may take a few seconds)...")
# Try to get the result with a timeout
try:
task_result = result.get(timeout=10) # 10 second timeout
self.stdout.write(self.style.SUCCESS(f"✅ Task completed successfully with result: {task_result}"))
return
except CeleryTimeoutError:  # Celery's TimeoutError, not the builtin
self.stdout.write(
self.style.WARNING(
"⚠️ Task did not complete within the timeout period. "
"This might be normal if Celery worker isn't running."
)
)
self.stdout.write(
"To check task status, run Celery worker in another terminal with:\n"
" make celery\n"
f"And then check status of task {task_id}"
)
except Exception as e:
self.stdout.write(self.style.ERROR(f"❌ Error testing Celery: {e}"))
self.stdout.write("Make sure the Celery broker (Redis or SQLite) is properly configured.")
self.stdout.write("To start Celery, run:\n make celery")
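To check a task later by the ID this command prints, a sketch using Celery's standard `AsyncResult` (assumes the project's Celery app is importable in the shell; `TASK_ID` is a placeholder):

```python
# Sketch: inspect a previously submitted task by id (standard Celery API).
from celery.result import AsyncResult

res = AsyncResult("TASK_ID")  # placeholder: paste the id printed by test_celery
print(res.state)   # e.g. PENDING, STARTED, SUCCESS, FAILURE
print(res.result)  # the task's return value once it has completed
```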

View File

@ -0,0 +1,69 @@
#!/usr/bin/env python
"""
Test the ExternalDataSource Model Schema
This management command tests if the ExternalDataSource schema has been correctly updated.
"""
import logging
from data_integration.models import ExternalDataSource
from django.core.management.base import BaseCommand
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = "Test ExternalDataSource model fields"
def handle(self, *args, **options): # noqa: ARG002
self.stdout.write("Testing ExternalDataSource schema...")
try:
# Get or create a test source
source, created = ExternalDataSource.objects.get_or_create(
name="Test Source",
defaults={
"api_url": "https://example.com/api",
"is_active": False,
},
)
if created:
self.stdout.write(f"Created test source with ID: {source.id}")
else:
self.stdout.write(f"Using existing test source with ID: {source.id}")
# Test setting each field
fields_to_test = {
"error_count": 0,
"last_error": "Test error message",
"sync_interval": 7200,
"timeout": 600,
}
for field, value in fields_to_test.items():
try:
setattr(source, field, value)
self.stdout.write(self.style.SUCCESS(f"✅ Successfully set {field} = {value}"))
except AttributeError:
self.stdout.write(self.style.ERROR(f"❌ Field {field} doesn't exist on the model"))
try:
source.save()
self.stdout.write(self.style.SUCCESS("✅ Successfully saved with all fields"))
except Exception as e:
self.stdout.write(self.style.ERROR(f"❌ Error saving model: {e}"))
# Read back the values to verify
refreshed_source = ExternalDataSource.objects.get(id=source.id)
self.stdout.write("\nVerifying saved values:")
for field, expected_value in fields_to_test.items():
actual_value = getattr(refreshed_source, field, "MISSING")
if actual_value == expected_value:
self.stdout.write(self.style.SUCCESS(f"{field} = {actual_value} (correct)"))
else:
self.stdout.write(self.style.ERROR(f"{field} = {actual_value} (expected: {expected_value})"))
except Exception as e:
self.stdout.write(self.style.ERROR(f"❌ Test failed: {e}"))

View File

@ -0,0 +1,117 @@
import bleach
from bleach.css_sanitizer import CSSSanitizer
from django.core.management.base import BaseCommand
class Command(BaseCommand):
help = "Test the HTML sanitizer with CSS Sanitizer"
def handle(self, *args, **options): # noqa: ARG002
# Create a test HTML string with various style attributes
test_html = """
<div style="color: red; background-color: yellow; transform: rotate(30deg);">
<p style="font-size: 16px; margin: 10px;">
This is a <span style="font-weight: bold; color: blue;">styled</span> paragraph.
</p>
<script>alert('XSS attack');</script>
<a href="javascript:alert('Evil');" style="text-decoration: none;">Dangerous Link</a>
<img src="x" onerror="alert('XSS')" style="border: 1px solid red;">
</div>
"""
# Create CSS sanitizer with allowed properties
css_sanitizer = CSSSanitizer(
allowed_css_properties=[
"color",
"background-color",
"font-family",
"font-size",
"font-weight",
"font-style",
"text-decoration",
"text-align",
"margin",
"margin-left",
"margin-right",
"margin-top",
"margin-bottom",
"padding",
"padding-left",
"padding-right",
"padding-top",
"padding-bottom",
"border",
"border-radius",
"width",
"height",
"line-height",
]
)
# Clean the HTML
cleaned_html = bleach.clean(
test_html,
tags=[
"b",
"i",
"u",
"em",
"strong",
"a",
"br",
"p",
"ul",
"ol",
"li",
"span",
"div",
"pre",
"code",
"blockquote",
],
attributes={
"a": ["href", "title", "target"],
"span": ["style", "class"],
"div": ["style", "class"],
"p": ["style", "class"],
"pre": ["style", "class"],
},
css_sanitizer=css_sanitizer,
strip=True,
)
# Print the results
self.stdout.write(self.style.SUCCESS("Original HTML:"))
self.stdout.write(test_html)
self.stdout.write("\n\n")
self.stdout.write(self.style.SUCCESS("Cleaned HTML:"))
self.stdout.write(cleaned_html)
self.stdout.write("\n\n")
# Check if unsafe attributes and styles were removed
self.stdout.write(self.style.SUCCESS("Security Checks:"))
if "script" not in cleaned_html:
self.stdout.write(self.style.SUCCESS("✓ Script tags removed"))
else:
self.stdout.write(self.style.ERROR("✗ Script tags found"))
if "javascript:" not in cleaned_html:
self.stdout.write(self.style.SUCCESS("✓ JavaScript URLs removed"))
else:
self.stdout.write(self.style.ERROR("✗ JavaScript URLs found"))
if "onerror" not in cleaned_html:
self.stdout.write(self.style.SUCCESS("✓ Event handlers removed"))
else:
self.stdout.write(self.style.ERROR("✗ Event handlers found"))
if "transform" not in cleaned_html:
self.stdout.write(self.style.SUCCESS("✓ Unsafe CSS properties removed"))
else:
self.stdout.write(self.style.ERROR("✗ Unsafe CSS properties found"))
if "img" not in cleaned_html:
self.stdout.write(self.style.SUCCESS("✓ Unsupported tags removed"))
else:
self.stdout.write(self.style.ERROR("✗ Unsupported tags found"))

View File

@ -0,0 +1,68 @@
import logging
from django.conf import settings
from django.core.management.base import BaseCommand
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = "Test Redis connection for Celery"
def handle(self, *args, **options): # noqa: ARG002
self.stdout.write("Testing Redis connection...")
try:
import redis
# Get Redis configuration from settings
redis_host = getattr(settings, "REDIS_HOST", "localhost")
redis_port = int(getattr(settings, "REDIS_PORT", 6379))
redis_db = int(getattr(settings, "REDIS_DB", 0))
# Override from environment if set
import os
if "REDIS_URL" in os.environ:
self.stdout.write(f"REDIS_URL environment variable found: {os.environ['REDIS_URL']}")
# Try to connect and ping
redis_client = redis.Redis(host=redis_host, port=redis_port, db=redis_db, socket_connect_timeout=2)
ping_result = redis_client.ping()
if ping_result:
self.stdout.write(
self.style.SUCCESS(
f"✅ Redis connection successful! Connected to {redis_host}:{redis_port}/{redis_db}"
)
)
self.stdout.write(f"Broker URL: {settings.CELERY_BROKER_URL}")
self.stdout.write(f"Result backend: {settings.CELERY_RESULT_BACKEND}")
# Try to set and get a value
test_key = "test_redis_connection"
test_value = "success"
redis_client.set(test_key, test_value)
retrieved_value = redis_client.get(test_key)
if retrieved_value and retrieved_value.decode() == test_value:
self.stdout.write(self.style.SUCCESS("✅ Redis SET/GET test passed!"))
else:
self.stdout.write(
self.style.WARNING(
f"⚠️ Redis SET/GET test failed: Got {retrieved_value} instead of {test_value}"
)
)
# Clean up
redis_client.delete(test_key)
else:
self.stdout.write(self.style.ERROR("❌ Redis ping failed!"))
except redis.exceptions.ConnectionError as e:
self.stdout.write(self.style.ERROR(f"❌ Redis connection error: {e}"))
self.stdout.write("Celery will use SQLite fallback if configured.")
except ImportError:
self.stdout.write(self.style.ERROR("❌ Redis package not installed. Install with: pip install redis"))
except Exception as e:
self.stdout.write(self.style.ERROR(f"❌ Error: {e}"))
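The command above reads its connection details from Django settings; a minimal sketch of those settings (the names match what `test_redis` reads; the values are illustrative defaults):

```python
# settings.py sketch -- names match what test_redis reads; values are illustrative.
REDIS_HOST = "localhost"
REDIS_PORT = 6379
REDIS_DB = 0
CELERY_BROKER_URL = f"redis://{REDIS_HOST}:{REDIS_PORT}/{REDIS_DB}"
CELERY_RESULT_BACKEND = CELERY_BROKER_URL
```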

View File

@ -0,0 +1,99 @@
# Generated by Django 5.2.1 on 2025-05-17 21:14
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = []
operations = [
migrations.CreateModel(
name="ChatSession",
fields=[
(
"id",
models.BigAutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("session_id", models.CharField(max_length=255, unique=True)),
("start_time", models.DateTimeField()),
("end_time", models.DateTimeField()),
("ip_address", models.GenericIPAddressField(blank=True, null=True)),
("country", models.CharField(blank=True, max_length=255, null=True)),
("language", models.CharField(blank=True, max_length=255, null=True)),
("messages_sent", models.IntegerField(blank=True, null=True)),
("sentiment", models.CharField(blank=True, max_length=255, null=True)),
("escalated", models.BooleanField(blank=True, null=True)),
("forwarded_hr", models.BooleanField(blank=True, null=True)),
(
"full_transcript_url",
models.URLField(blank=True, max_length=1024, null=True),
),
("avg_response_time", models.FloatField(blank=True, null=True)),
("tokens", models.IntegerField(blank=True, null=True)),
("tokens_eur", models.FloatField(blank=True, null=True)),
("category", models.CharField(blank=True, max_length=255, null=True)),
("initial_msg", models.TextField(blank=True, null=True)),
("user_rating", models.IntegerField(blank=True, null=True)),
],
),
migrations.CreateModel(
name="ExternalDataSource",
fields=[
(
"id",
models.BigAutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("name", models.CharField(default="External API", max_length=255)),
("api_url", models.URLField(default="https://proto.notso.ai/XY/chats")),
(
"auth_username",
models.CharField(blank=True, max_length=255, null=True),
),
(
"auth_password",
models.CharField(blank=True, max_length=255, null=True),
),
("last_synced", models.DateTimeField(blank=True, null=True)),
("is_active", models.BooleanField(default=True)),
],
),
migrations.CreateModel(
name="ChatMessage",
fields=[
(
"id",
models.BigAutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("timestamp", models.DateTimeField(auto_now_add=True)),
("sender", models.CharField(max_length=255)),
("message", models.TextField()),
("safe_html_message", models.TextField(blank=True, null=True)),
(
"session",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="messages",
to="data_integration.chatsession",
),
),
],
),
]

View File

@ -0,0 +1,43 @@
# Generated by Django 5.2.1 on 2025-05-17 22:33
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("data_integration", "0001_initial"),
]
operations = [
migrations.AddField(
model_name="externaldatasource",
name="error_count",
field=models.IntegerField(default=0),
),
migrations.AddField(
model_name="externaldatasource",
name="last_error",
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AddField(
model_name="externaldatasource",
name="sync_interval",
field=models.IntegerField(
default=3600,
help_text="Sync interval in seconds. Default is 3600 (1 hour)",
),
),
migrations.AddField(
model_name="externaldatasource",
name="timeout",
field=models.IntegerField(
default=300,
help_text="Timeout in seconds for each sync operation. Default is 300 (5 minutes)",
),
),
migrations.AlterField(
model_name="externaldatasource",
name="api_url",
field=models.URLField(default="https://proto.notso.ai/jumbo/chats"),
),
]

View File

@ -0,0 +1,78 @@
import os
from django.db import models
class ChatSession(models.Model):
session_id = models.CharField(max_length=255, unique=True)
start_time = models.DateTimeField()
end_time = models.DateTimeField()
ip_address = models.GenericIPAddressField(null=True, blank=True)
country = models.CharField(max_length=255, null=True, blank=True)
language = models.CharField(max_length=255, null=True, blank=True)
messages_sent = models.IntegerField(null=True, blank=True)
sentiment = models.CharField(max_length=255, null=True, blank=True)
escalated = models.BooleanField(null=True, blank=True)
forwarded_hr = models.BooleanField(null=True, blank=True)
full_transcript_url = models.URLField(max_length=1024, null=True, blank=True)
avg_response_time = models.FloatField(null=True, blank=True)
tokens = models.IntegerField(null=True, blank=True)
tokens_eur = models.FloatField(null=True, blank=True)
category = models.CharField(max_length=255, null=True, blank=True)
initial_msg = models.TextField(null=True, blank=True)
user_rating = models.IntegerField(null=True, blank=True)
def __str__(self):
return self.session_id
class ChatMessage(models.Model):
session = models.ForeignKey(ChatSession, related_name="messages", on_delete=models.CASCADE)
timestamp = models.DateTimeField(auto_now_add=True) # Changed to auto_now_add for simplicity
sender = models.CharField(max_length=255) # "User" or "Assistant"
message = models.TextField()
safe_html_message = models.TextField(blank=True, null=True) # For storing sanitized HTML
def __str__(self):
return f"{self.session.session_id} - {self.sender} at {self.timestamp}"
class ExternalDataSource(models.Model):
name = models.CharField(max_length=255, default="External API")
api_url = models.URLField(default="https://proto.notso.ai/jumbo/chats")
auth_username = models.CharField(max_length=255, blank=True, null=True)
auth_password = models.CharField(
max_length=255, blank=True, null=True
) # Consider using a more secure way to store credentials
last_synced = models.DateTimeField(null=True, blank=True)
is_active = models.BooleanField(default=True)
error_count = models.IntegerField(default=0)
last_error = models.CharField(max_length=255, blank=True, null=True)
sync_interval = models.IntegerField(default=3600, help_text="Sync interval in seconds. Default is 3600 (1 hour)")
timeout = models.IntegerField(
default=300,
help_text="Timeout in seconds for each sync operation. Default is 300 (5 minutes)",
)
def get_auth_username(self):
"""Get username from environment variable if set, otherwise use stored value"""
env_username = os.environ.get("EXTERNAL_API_USERNAME")
return env_username if env_username else self.auth_username
def get_auth_password(self):
"""Get password from environment variable if set, otherwise use stored value"""
env_password = os.environ.get("EXTERNAL_API_PASSWORD")
return env_password if env_password else self.auth_password
def get_status(self):
"""Get the status of this data source"""
if not self.is_active:
return "Inactive"
if not self.last_synced:
return "Never synced"
if self.error_count > 0:
return f"Error ({self.error_count})"
return "Active"
def __str__(self):
return self.name
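A quick sketch of the credential override behavior (env var names from the methods above; the values are illustrative):

```python
# Sketch: environment variables override stored credentials at call time.
import os
from data_integration.models import ExternalDataSource

os.environ["EXTERNAL_API_USERNAME"] = "api-user"  # illustrative value
source = ExternalDataSource.objects.first()
if source:
    print(source.get_auth_username())  # -> "api-user", regardless of the stored value
```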

View File

@ -0,0 +1,116 @@
import logging
import os
from celery import shared_task
from django.db import utils as django_db_utils
from django.utils import timezone
from .models import ExternalDataSource
from .utils import fetch_and_store_chat_data
logger = logging.getLogger(__name__)
@shared_task(name="data_integration.tasks.test_task", bind=True)
def test_task(self):
"""A simple test task to verify Celery is working without external dependencies."""
logger.info("Test task executed at %s (task_id: %s)", timezone.now(), self.request.id)
return "Test task completed successfully!"
@shared_task(
name="data_integration.tasks.periodic_fetch_chat_data",
bind=True,
autoretry_for=(Exception,),
retry_kwargs={"max_retries": 3, "countdown": 60},
soft_time_limit=int(os.environ.get("FETCH_DATA_TIMEOUT", 300)), # 5 minutes default
)
def periodic_fetch_chat_data(self):
"""Periodically fetch and process chat data from external sources.
This task:
1. Fetches data from all active external data sources
2. Processes and stores the data in the database
3. Updates the last_synced timestamp on each source
4. Handles errors with retries
"""
logger.info("Starting periodic chat data fetch (task_id: %s)...", self.request.id)
try:
# Get all active data sources
active_sources = ExternalDataSource.objects.filter(is_active=True)
if not active_sources.exists():
logger.warning("No active external data sources found. Skipping fetch.")
return "No active data sources found"
successful_sources = []
failed_sources = []
for source in active_sources:
try:
logger.info(f"Processing source: {source.name} (ID: {source.id})")
fetch_and_store_chat_data(source_id=source.id)
source.last_synced = timezone.now()
# Check if error_count field exists in the model
update_fields = ["last_synced"]
try:
source.error_count = 0
source.last_error = None
update_fields.extend(["error_count", "last_error"])
except AttributeError:
# Fields might not exist yet if migrations haven't been applied
logger.warning("New fields not available. Run migrations to enable error tracking.")
source.save(update_fields=update_fields)
successful_sources.append(source.name)
except Exception as e:
logger.error(f"Error fetching data from source {source.name}: {e}", exc_info=True)
try:
source.error_count = getattr(source, "error_count", 0) + 1
source.last_error = str(e)[:255] # Truncate to fit in the field
source.save(update_fields=["error_count", "last_error"])
except (AttributeError, django_db_utils.OperationalError):
# If fields don't exist, just update last_synced
logger.warning("Could not update error fields. Run migrations to enable error tracking.")
source.last_synced = timezone.now()
source.save(update_fields=["last_synced"])
failed_sources.append(source.name)
if failed_sources and not successful_sources:
# If all sources failed, we should raise an exception to trigger retry
raise Exception(f"All data sources failed: {', '.join(failed_sources)}")
result_message = f"Completed: {len(successful_sources)} successful, {len(failed_sources)} failed"
logger.info(result_message)
return result_message
except Exception as e:
logger.error(f"Error during periodic chat data fetch: {e}", exc_info=True)
raise # Re-raise to trigger Celery retry
@shared_task(name="data_integration.tasks.refresh_specific_source", bind=True)
def refresh_specific_source(self, source_id):
"""Manually refresh a specific data source.
Args:
source_id: ID of the ExternalDataSource to refresh
"""
logger.info(f"Starting manual refresh of data source ID: {source_id} (task_id: {self.request.id})")
try:
source = ExternalDataSource.objects.get(id=source_id)
fetch_and_store_chat_data(source_id=source_id)
source.last_synced = timezone.now()
source.error_count = 0
source.last_error = None
source.save(update_fields=["last_synced", "error_count", "last_error"])
logger.info(f"Manual refresh of data source {source.name} completed successfully")
return f"Successfully refreshed data source: {source.name}"
except ExternalDataSource.DoesNotExist:
logger.error(f"Data source with ID {source_id} does not exist")
return f"Error: Data source with ID {source_id} does not exist"
except Exception as e:
logger.error(
f"Error during manual refresh of data source {source_id}: {e}",
exc_info=True,
)
return f"Error: {str(e)}"

View File

@ -0,0 +1 @@
# Create your tests here.

View File

@ -0,0 +1,14 @@
from django.urls import path
from . import views
app_name = "data_integration"
urlpatterns = [
path("manual-refresh/", views.manual_data_refresh, name="manual_data_refresh"),
path(
"refresh/<int:source_id>/",
views.refresh_specific_datasource,
name="refresh_specific_datasource",
),
]

View File

@ -0,0 +1,340 @@
import csv
import logging
from datetime import datetime
import bleach
import requests
from bleach.css_sanitizer import CSSSanitizer
from django.utils.timezone import make_aware
from .models import ChatMessage, ChatSession, ExternalDataSource
logger = logging.getLogger(__name__)
EXPECTED_HEADERS = [
"session_id",
"start_time",
"end_time",
"ip_address",
"country",
"language",
"messages_sent",
"sentiment",
"escalated",
"forwarded_hr",
"full_transcript",
"avg_response_time",
"tokens",
"tokens_eur",
"category",
"initial_msg",
"user_rating",
]
def fetch_and_store_chat_data(source_id=None):
"""Fetch chat data from an external API and store it in the database.
Args:
source_id: Optional ID of specific ExternalDataSource to use.
If None, will use the first active source.
Returns:
dict: Stats about the operation (sessions created, updated, errors)
"""
if source_id:
source = ExternalDataSource.objects.filter(id=source_id, is_active=True).first()
if not source:
logger.error(f"Data source with ID {source_id} not found or not active.")
return {
"success": False,
"error": f"Data source with ID {source_id} not found or not active.",
}
else:
source = ExternalDataSource.objects.filter(is_active=True).first()
if not source:
logger.warning("No active data source found.")
return {"success": False, "error": "No active data source found."}
stats = {
"sessions_created": 0,
"sessions_updated": 0,
"transcripts_processed": 0,
"errors": 0,
"success": True,
}
try:
# Fetch data from API with timeout from source settings or default
timeout = getattr(source, "timeout", 30)
response = requests.get(
source.api_url,
auth=((source.get_auth_username(), source.get_auth_password()) if source.get_auth_username() else None),
timeout=timeout,
)
response.raise_for_status()
except requests.RequestException as e:
error_msg = f"Error fetching data from API {source.api_url}: {e}"
logger.error(error_msg)
return {"success": False, "error": error_msg}
# Process CSV data
csv_data = response.content.decode("utf-8").splitlines()
reader = csv.reader(csv_data)
# Skip header if present, or use predefined if not
# header = next(reader) # Assuming the first row is a header
# For this specific case, we know the header is missing.
header = EXPECTED_HEADERS
for row in reader:
if not row: # Skip empty rows
continue
try:
# Fix for zip() argument mismatch: pad the row with empty strings if needed
padded_row = row + [""] * (len(header) - len(row))
data = dict(zip(header, padded_row, strict=False))
try:
# Try European date format (DD.MM.YYYY) first
start_time = make_aware(datetime.strptime(data["start_time"], "%d.%m.%Y %H:%M:%S"))
except ValueError:
# Fallback to ISO format (YYYY-MM-DD)
start_time = make_aware(datetime.strptime(data["start_time"], "%Y-%m-%d %H:%M:%S"))
try:
# Try European date format (DD.MM.YYYY) first
end_time = make_aware(datetime.strptime(data["end_time"], "%d.%m.%Y %H:%M:%S"))
except ValueError:
# Fallback to ISO format (YYYY-MM-DD)
end_time = make_aware(datetime.strptime(data["end_time"], "%Y-%m-%d %H:%M:%S"))
messages_sent = int(data["messages_sent"]) if data["messages_sent"] else None
escalated = data["escalated"].lower() == "true" if data["escalated"] else None
forwarded_hr = data["forwarded_hr"].lower() == "true" if data["forwarded_hr"] else None
avg_response_time = float(data["avg_response_time"]) if data["avg_response_time"] else None
tokens = int(data["tokens"]) if data["tokens"] else None
tokens_eur = float(data["tokens_eur"]) if data["tokens_eur"] else None
user_rating = int(data["user_rating"]) if data["user_rating"] and data["user_rating"].isdigit() else None
session, created = ChatSession.objects.update_or_create(
session_id=data["session_id"],
defaults={
"start_time": start_time,
"end_time": end_time,
"ip_address": data.get("ip_address"),
"country": data.get("country"),
"language": data.get("language"),
"messages_sent": messages_sent,
"sentiment": data.get("sentiment"),
"escalated": escalated,
"forwarded_hr": forwarded_hr,
"full_transcript_url": data.get("full_transcript"),
"avg_response_time": avg_response_time,
"tokens": tokens,
"tokens_eur": tokens_eur,
"category": data.get("category"),
"initial_msg": data.get("initial_msg"),
"user_rating": user_rating,
},
)
if created:
stats["sessions_created"] += 1
logger.info(f"Created session: {session.session_id}")
else:
stats["sessions_updated"] += 1
logger.info(f"Updated session: {session.session_id}")
# Fetch and process transcript if URL is present
if session.full_transcript_url:
transcript_result = fetch_and_store_transcript(session, timeout)
if transcript_result["success"]:
stats["transcripts_processed"] += 1
except Exception as e:
logger.error(f"Error processing row: {row}. Error: {e}", exc_info=True)
stats["errors"] += 1
continue
source.last_synced = make_aware(datetime.now())
source.save()
logger.info("Data sync complete. Stats: {stats}")
return stats
def fetch_and_store_transcript(session, timeout=30):
"""Fetch and process transcript for a chat session.
Args:
session: The ChatSession object
timeout: Timeout in seconds for the request
Returns:
dict: Result of the operation
"""
result = {"success": False, "messages_created": 0, "error": None}
try:
transcript_response = requests.get(session.full_transcript_url, timeout=timeout)
transcript_response.raise_for_status()
transcript_content = transcript_response.content.decode("utf-8")
messages_created = parse_and_store_transcript_messages(session, transcript_content)
result["success"] = True
result["messages_created"] = messages_created
return result
except requests.RequestException as e:
error_msg = f"Error fetching transcript for session {session.session_id}: {e}"
logger.error(error_msg)
result["error"] = error_msg
return result
except Exception as e:
error_msg = f"Error processing transcript for session {session.session_id}: {e}"
logger.error(error_msg, exc_info=True)
result["error"] = error_msg
return result
def parse_and_store_transcript_messages(session, transcript_content):
"""Parse and store messages from a transcript.
Args:
session: The ChatSession object
transcript_content: The raw transcript content
Returns:
int: Number of messages created
"""
lines = transcript_content.splitlines()
current_sender = None
current_message_lines = []
messages_created = 0
# First, delete existing messages for this session to avoid duplicates
existing_count = ChatMessage.objects.filter(session=session).count()
if existing_count > 0:
logger.info(f"Deleting {existing_count} existing messages for session {session.session_id}")
ChatMessage.objects.filter(session=session).delete()
for line in lines:
if line.startswith("User:"):
if (
current_sender
and current_message_lines
and save_message(session, current_sender, "\n".join(current_message_lines))
):
messages_created += 1
current_sender = "User"
current_message_lines = [line.replace("User:", "").strip()]
elif line.startswith("Assistant:"):
if (
current_sender
and current_message_lines
and save_message(session, current_sender, "\n".join(current_message_lines))
):
messages_created += 1
current_sender = "Assistant"
current_message_lines = [line.replace("Assistant:", "").strip()]
elif current_sender:
current_message_lines.append(line.strip())
# Save the last message
if (
current_sender
and current_message_lines
and save_message(session, current_sender, "\n".join(current_message_lines))
):
messages_created += 1
logger.info(f"Created {messages_created} messages for session {session.session_id}")
return messages_created
def save_message(session, sender, message_text):
"""Save a message for a chat session.
Args:
session: The ChatSession object
sender: The sender of the message ("User" or "Assistant")
message_text: The message text, which may contain HTML
Returns:
bool: True if message was created, False otherwise
"""
if not message_text.strip():
return False
try:
# Create a CSS sanitizer with allowed CSS properties
css_sanitizer = CSSSanitizer(
allowed_css_properties=[
"color",
"background-color",
"font-family",
"font-size",
"font-weight",
"font-style",
"text-decoration",
"text-align",
"margin",
"margin-left",
"margin-right",
"margin-top",
"margin-bottom",
"padding",
"padding-left",
"padding-right",
"padding-top",
"padding-bottom",
"border",
"border-radius",
"width",
"height",
"line-height",
]
)
# Sanitize HTML content before saving if necessary
safe_html = bleach.clean(
message_text,
tags=[
"b",
"i",
"u",
"em",
"strong",
"a",
"br",
"p",
"ul",
"ol",
"li",
"span",
"div",
"pre",
"code",
"blockquote",
],
attributes={
"a": ["href", "title", "target"],
"span": ["style", "class"],
"div": ["style", "class"],
"p": ["style", "class"],
"pre": ["style", "class"],
},
css_sanitizer=css_sanitizer,
strip=True,
)
ChatMessage.objects.create(
session=session,
sender=sender,
message=message_text,
safe_html_message=safe_html,
)
logger.debug(f"Stored message for session {session.session_id} from {sender}")
return True
except Exception as e:
logger.error(f"Error saving message for session {session.session_id}: {e}", exc_info=True)
return False
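A usage sketch for the helpers above from a Django shell (the `source_id` value is illustrative; the return shape follows the docstring):

```python
# Sketch: run a fetch directly and inspect the returned stats dict.
from data_integration.utils import fetch_and_store_chat_data

stats = fetch_and_store_chat_data(source_id=1)  # source_id=1 is illustrative
if stats["success"]:
    print(f"{stats['sessions_created']} created, {stats['sessions_updated']} updated, "
          f"{stats['transcripts_processed']} transcripts, {stats['errors']} row errors")
else:
    print(f"Fetch failed: {stats['error']}")
```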

View File

@ -0,0 +1,54 @@
from django.contrib import messages
from django.contrib.admin.views.decorators import staff_member_required
from django.contrib.auth.decorators import login_required, user_passes_test
from django.shortcuts import get_object_or_404, redirect
from .models import ExternalDataSource
from .tasks import periodic_fetch_chat_data, refresh_specific_source
from .utils import fetch_and_store_chat_data
# Create your views here.
def is_superuser(user):
return user.is_superuser
@login_required
@user_passes_test(is_superuser)
def manual_data_refresh(request):
if request.method == "POST":
try:
# Try to use Celery first
try:
# Asynchronous with Celery
periodic_fetch_chat_data.delay()
messages.success(
request,
"Manual data refresh triggered successfully. The data will be updated shortly.",
)
except Exception:
# Fall back to synchronous if Celery is not available
fetch_and_store_chat_data()
messages.success(
request,
"Manual data refresh completed successfully (synchronous mode).",
)
except Exception as e:
messages.error(request, f"Failed to refresh data: {e}")
return redirect(request.headers.get("referer", "dashboard")) # Redirect to previous page or dashboard
@staff_member_required
def refresh_specific_datasource(request, source_id):
"""View to trigger refresh of a specific data source. Used as a backup for admin URLs."""
source = get_object_or_404(ExternalDataSource, pk=source_id)
try:
# Try to use Celery
task = refresh_specific_source.delay(source_id)
messages.success(request, f"Data refresh task started for {source.name} (Task ID: {task.id})")
except Exception as e:
messages.error(request, f"Failed to refresh data source {source.name}: {e}")
return redirect(request.headers.get("referer", "/admin/data_integration/externaldatasource/"))
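A sketch of exercising the manual-refresh view with Django's test client (URL name taken from `urls.py` above; the superuser credentials are illustrative):

```python
# Sketch: POST to the manual refresh endpoint as a superuser (Django test client).
from django.test import Client
from django.urls import reverse

client = Client()
client.login(username="admin", password="admin")  # illustrative superuser credentials
resp = client.post(reverse("data_integration:manual_data_refresh"))
print(resp.status_code)  # 302: redirects back to the referer or the dashboard
```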

View File

@ -0,0 +1,42 @@
# dashboard_project/scripts/cleanup_duplicates.py
import os
import sys
# Add the project root to sys.path
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "dashboard_project.settings")
import django # noqa: I001
django.setup()
from dashboard.models import ChatSession # noqa: E402, I001
from django.db.models import Count # noqa: E402
def cleanup_duplicates():
print("Looking for duplicate ChatSessions...")
duplicates = ChatSession.objects.values("session_id", "data_source").annotate(count=Count("id")).filter(count__gt=1)
total_deleted = 0
for dup in duplicates:
session_id = dup["session_id"]
data_source = dup["data_source"]
# Get all ids for this duplicate group, order by id (keep the first, delete the rest)
ids = list(
ChatSession.objects.filter(session_id=session_id, data_source=data_source)
.order_by("id")
.values_list("id", flat=True)
)
# Keep the first, delete the rest
to_delete = ids[1:]
deleted, _ = ChatSession.objects.filter(id__in=to_delete).delete()
total_deleted += deleted
print(f"Removed {deleted} duplicates for session_id={session_id}, data_source={data_source}")
print(f"Done. Total duplicates removed: {total_deleted}")
if __name__ == "__main__":
cleanup_duplicates()

View File

@ -0,0 +1,200 @@
#!/usr/bin/env python
# scripts/fix_dashboard_data.py
import os
import sys
from datetime import datetime
import django
# Set up Django environment before importing any project models,
# otherwise Django raises AppRegistryNotReady
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "dashboard_project.settings")
django.setup()
from accounts.models import Company  # noqa: E402
from dashboard.models import ChatSession as DashboardChatSession  # noqa: E402
from dashboard.models import DataSource  # noqa: E402
from data_integration.models import ChatSession as ExternalChatSession  # noqa: E402
from data_integration.models import ExternalDataSource  # noqa: E402
from django.db import transaction  # noqa: E402
from django.utils.timezone import make_aware  # noqa: E402
# SCRIPT CONFIG
CREATE_TEST_DATA = False # Set to True to create sample data if none exists
COMPANY_NAME = "Notso AI" # The company name to use
def main():
print("Starting dashboard data fix...")
# Get company
company = Company.objects.filter(name=COMPANY_NAME).first()
if not company:
print(f"Error: Company '{COMPANY_NAME}' not found.")
print("Available companies:")
for c in Company.objects.all():
print(f" - {c.name} (ID: {c.id})")
return
print(f"Using company: {company.name} (ID: {company.id})")
# Get or create external data source
ext_source, created = ExternalDataSource.objects.get_or_create(
name="External API Data",
defaults={
"api_url": "https://proto.notso.ai/jumbo/chats",
"is_active": True,
"sync_interval": 3600,
"last_synced": make_aware(datetime.now()),
},
)
if created:
print(f"Created external data source: {ext_source.name} (ID: {ext_source.id})")
else:
print(f"Using existing external data source: {ext_source.name} (ID: {ext_source.id})")
# Get or create dashboard data source linked to external source
dash_source, created = DataSource.objects.get_or_create(
external_source=ext_source,
company=company,
defaults={
"name": "External API Data",
"description": "External data source for chat analytics",
},
)
if created:
print(f"Created dashboard data source: {dash_source.name} (ID: {dash_source.id})")
else:
print(f"Using existing dashboard data source: {dash_source.name} (ID: {dash_source.id})")
# Check for external chat sessions
ext_count = ExternalChatSession.objects.count()
print(f"Found {ext_count} external chat sessions")
if ext_count == 0 and CREATE_TEST_DATA:
print("No external chat sessions found. Creating test data...")
create_test_data(ext_source)
# Sync data from external to dashboard
sync_data(ext_source, dash_source)
print("Done! Check your dashboard for data.")
def create_test_data(ext_source):
"""Create sample chat sessions in the external data source"""
sessions_created = 0
# Create test data with association to the external data source
test_data = [
{
"session_id": "test-session-1",
"start_time": make_aware(datetime.strptime("01.05.2025 10:00:00", "%d.%m.%Y %H:%M:%S")),
"end_time": make_aware(datetime.strptime("01.05.2025 10:15:00", "%d.%m.%Y %H:%M:%S")),
"country": "Netherlands",
"language": "Dutch",
"messages_sent": 10,
"sentiment": "Positive",
"initial_msg": "Can you help me with my order?",
},
{
"session_id": "test-session-2",
"start_time": make_aware(datetime.strptime("02.05.2025 14:30:00", "%d.%m.%Y %H:%M:%S")),
"end_time": make_aware(datetime.strptime("02.05.2025 14:45:00", "%d.%m.%Y %H:%M:%S")),
"country": "Belgium",
"language": "French",
"messages_sent": 12,
"sentiment": "Neutral",
"initial_msg": "Je cherche des informations sur les produits.",
},
{
"session_id": "test-session-3",
"start_time": make_aware(datetime.strptime("03.05.2025 09:15:00", "%d.%m.%Y %H:%M:%S")),
"end_time": make_aware(datetime.strptime("03.05.2025 09:30:00", "%d.%m.%Y %H:%M:%S")),
"country": "Germany",
"language": "German",
"messages_sent": 8,
"sentiment": "Negative",
"initial_msg": "Ich habe ein Problem mit meiner Bestellung.",
},
]
for data in test_data:
ExternalChatSession.objects.create(
session_id=data["session_id"],
start_time=data["start_time"],
end_time=data["end_time"],
ip_address="127.0.0.1",
country=data["country"],
language=data["language"],
messages_sent=data["messages_sent"],
sentiment=data["sentiment"],
escalated=False,
forwarded_hr=False,
initial_msg=data["initial_msg"],
user_rating=5,
external_source=ext_source,
)
sessions_created += 1
print(f"Created {sessions_created} test sessions")
def sync_data(ext_source, dash_source):
"""Sync data from external data source to dashboard data source"""
external_sessions = ExternalChatSession.objects.filter(external_source=ext_source)
session_count = external_sessions.count()
if session_count == 0:
print("No external sessions to sync")
return
print(f"Syncing {session_count} sessions...")
# Clear existing data
existing_count = DashboardChatSession.objects.filter(data_source=dash_source).count()
if existing_count > 0:
print(f"Clearing {existing_count} existing dashboard sessions")
DashboardChatSession.objects.filter(data_source=dash_source).delete()
# Process each external session
synced_count = 0
error_count = 0
for ext_session in external_sessions:
try:
with transaction.atomic():
# Create dashboard chat session
dashboard_session = DashboardChatSession.objects.create(
data_source=dash_source,
session_id=ext_session.session_id,
start_time=ext_session.start_time,
end_time=ext_session.end_time,
ip_address=ext_session.ip_address,
country=ext_session.country or "",
language=ext_session.language or "",
messages_sent=ext_session.messages_sent or 0,
sentiment=ext_session.sentiment or "",
escalated=ext_session.escalated or False,
forwarded_hr=ext_session.forwarded_hr or False,
full_transcript=ext_session.full_transcript_url or "",
avg_response_time=ext_session.avg_response_time,
tokens=ext_session.tokens or 0,
tokens_eur=ext_session.tokens_eur,
category=ext_session.category or "",
initial_msg=ext_session.initial_msg or "",
user_rating=(str(ext_session.user_rating) if ext_session.user_rating is not None else ""),
)
synced_count += 1
print(f"Synced session: {dashboard_session.session_id}")
except Exception as e:
print(f"Error syncing session {ext_session.session_id}: {str(e)}")
error_count += 1
print(f"Sync complete. Total: {synced_count} sessions synced, {error_count} errors")
if __name__ == "__main__":
main()

View File

@ -2,6 +2,186 @@
* dashboard.css - Styles specific to dashboard functionality
*/
/* Theme variables */
:root {
/* Light theme (default) */
--bg-color: #f8f9fa;
--text-color: #212529;
--card-bg: #ffffff;
--card-border: #dee2e6;
--card-header-bg: #f1f3f5;
--sidebar-bg: #f8f9fa;
--navbar-bg: #343a40;
--navbar-color: #ffffff;
--link-color: #007bff;
--secondary-text: #6c757d;
--border-color: #e9ecef;
--input-bg: #ffffff;
--input-border: #ced4da;
--table-stripe: rgba(0, 0, 0, 0.05);
--stats-card-bg: #f1f3f5;
--icon-bg: #e9f2ff;
--icon-color: #007bff;
--theme-transition:
color 0.2s ease, background-color 0.2s ease, border-color 0.2s ease, box-shadow 0.2s ease;
}
/* Dark theme */
[data-bs-theme="dark"] {
--bg-color: #212529;
--text-color: #f8f9fa;
--card-bg: #343a40;
--card-border: #495057;
--card-header-bg: #495057;
--sidebar-bg: #2c3034;
--navbar-bg: #1c1f23;
--navbar-color: #f8f9fa;
--link-color: #6ea8fe;
--secondary-text: #adb5bd;
--border-color: #495057;
--input-bg: #2b3035;
--input-border: #495057;
--table-stripe: rgba(255, 255, 255, 0.05);
--stats-card-bg: #2c3034;
--icon-bg: #1e3a8a;
--icon-color: #6ea8fe;
}
/* Apply theme variables */
body {
background-color: var(--bg-color);
color: var(--text-color);
transition: var(--theme-transition);
}
.card {
background-color: var(--card-bg);
border-color: var(--card-border);
transition: var(--theme-transition);
}
.card-header {
background-color: var(--card-header-bg);
border-bottom-color: var(--card-border);
transition: var(--theme-transition);
}
.navbar-dark {
background-color: var(--navbar-bg) !important;
border-bottom: 1px solid var(--border-color);
}
.navbar-dark .navbar-brand,
.navbar-dark .nav-link,
.navbar-dark .navbar-text {
color: var(--navbar-color) !important;
}
.navbar-dark .btn-outline-light {
border-color: var(--border-color);
color: var(--navbar-color);
}
.navbar-dark .btn-outline-light:hover {
background-color: rgba(255, 255, 255, 0.1);
border-color: var(--border-color);
}
.sidebar {
background-color: var(--sidebar-bg) !important;
}
/* Sidebar navigation styling with dark mode support */
.sidebar .nav-link {
color: var(--text-color);
transition: all 0.2s ease;
border-radius: 0.375rem;
margin: 0.1rem 0.5rem;
padding: 0.5rem 1rem;
}
.sidebar .nav-link:hover {
color: var(--link-color);
background-color: rgba(0, 0, 0, 0.05);
}
[data-bs-theme="dark"] .sidebar .nav-link:hover {
background-color: rgba(255, 255, 255, 0.05);
}
.sidebar .nav-link.active {
color: var(--link-color);
background-color: rgba(13, 110, 253, 0.1);
font-weight: 600;
}
[data-bs-theme="dark"] .sidebar .nav-link.active {
background-color: rgba(110, 168, 254, 0.1);
}
.sidebar .nav-link i {
color: var(--secondary-text);
width: 20px;
text-align: center;
margin-right: 0.5rem;
}
.sidebar .nav-link:hover i,
.sidebar .nav-link.active i {
color: var(--link-color);
}
.sidebar .nav-header {
color: var(--secondary-text);
font-size: 0.8rem;
text-transform: uppercase;
letter-spacing: 0.08em;
padding: 0.5rem 1.25rem;
margin-top: 1rem;
}
.table {
color: var(--text-color);
}
.table-striped tbody tr:nth-of-type(odd) {
background-color: var(--table-stripe);
}
.nav-link {
color: var(--link-color);
}
.stats-card {
background-color: var(--stats-card-bg) !important;
}
.stat-card .stat-icon {
background-color: var(--icon-bg);
color: var(--icon-color);
}
.form-control,
.form-select {
background-color: var(--input-bg);
border-color: var(--input-border);
color: var(--text-color);
}
/* Footer */
footer {
background-color: var(--card-bg);
border-top: 1px solid var(--border-color);
color: var(--secondary-text);
margin-top: 2rem;
padding: 1.5rem 0;
transition: var(--theme-transition);
}
[data-bs-theme="dark"] footer {
background-color: var(--navbar-bg);
}
/* Dashboard grid layout */
.dashboard-grid {
display: grid;
@ -291,7 +471,43 @@
}
}
/* --- Stat Boxes Alignment Fix (Bottom Align, No Overlap) --- */
/* Preserve colored background for stat cards in both themes */
.col-md-3 .card.stats-card.bg-primary {
background-color: var(--bs-primary) !important;
color: white !important;
}
.col-md-3 .card.stats-card.bg-success {
background-color: var(--bs-success) !important;
color: white !important;
}
.col-md-3 .card.stats-card.bg-info {
background-color: var(--bs-info) !important;
color: white !important;
}
.col-md-3 .card.stats-card.bg-warning {
background-color: var(--bs-warning) !important;
color: white !important;
}
.col-md-3 .card.stats-card.bg-danger {
background-color: var(--bs-danger) !important;
color: white !important;
}
.col-md-3 .card.stats-card.bg-secondary {
background-color: var(--bs-secondary) !important;
color: white !important;
}
.col-md-3 .card.stats-card.bg-light {
background-color: var(--bs-light) !important;
color: var(--bs-dark) !important;
}
/* Stats Cards Alignment Fix (Bottom Align, No Overlap) */
.stats-row {
display: flex;
flex-wrap: wrap;

View File

@ -7,6 +7,122 @@
*/
document.addEventListener("DOMContentLoaded", function () {
// Set up Plotly default config based on theme
function updatePlotlyTheme() {
// Force a fresh check of the current theme
const isDarkMode = document.documentElement.getAttribute("data-bs-theme") === "dark";
console.log(
"updatePlotlyTheme called - Current theme mode:",
isDarkMode ? "dark" : "light",
);
window.plotlyDefaultLayout = {
font: {
color: isDarkMode ? "#f8f9fa" : "#212529",
family: '-apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, "Helvetica Neue", Arial, sans-serif',
},
paper_bgcolor: isDarkMode ? "#343a40" : "#ffffff",
plot_bgcolor: isDarkMode ? "#343a40" : "#ffffff",
colorway: [
"#4285F4",
"#EA4335",
"#FBBC05",
"#34A853",
"#FF6D00",
"#46BDC6",
"#DB4437",
"#0F9D58",
"#AB47BC",
"#00ACC1",
],
margin: {
l: 50,
r: 30,
t: 30,
b: 50,
pad: 10,
},
hovermode: "closest",
xaxis: {
automargin: true,
gridcolor: isDarkMode ? "rgba(255,255,255,0.1)" : "rgba(0,0,0,0.1)",
zerolinecolor: isDarkMode ? "rgba(255,255,255,0.2)" : "rgba(0,0,0,0.2)",
title: {
font: {
color: isDarkMode ? "#f8f9fa" : "#212529",
},
},
tickfont: {
color: isDarkMode ? "#f8f9fa" : "#212529",
},
},
yaxis: {
automargin: true,
gridcolor: isDarkMode ? "rgba(255,255,255,0.1)" : "rgba(0,0,0,0.1)",
zerolinecolor: isDarkMode ? "rgba(255,255,255,0.2)" : "rgba(0,0,0,0.2)",
title: {
font: {
color: isDarkMode ? "#f8f9fa" : "#212529",
},
},
tickfont: {
color: isDarkMode ? "#f8f9fa" : "#212529",
},
},
legend: {
font: {
color: isDarkMode ? "#f8f9fa" : "#212529",
},
bgcolor: isDarkMode ? "rgba(52, 58, 64, 0.8)" : "rgba(255, 255, 255, 0.8)",
},
modebar: {
bgcolor: isDarkMode ? "rgba(52, 58, 64, 0.8)" : "rgba(255, 255, 255, 0.8)",
color: isDarkMode ? "#f8f9fa" : "#212529",
activecolor: isDarkMode ? "#6ea8fe" : "#007bff",
},
};
// Config for specific chart types
window.plotlyBarConfig = {
...window.plotlyDefaultLayout,
bargap: 0.1,
bargroupgap: 0.2,
};
window.plotlyPieConfig = {
...window.plotlyDefaultLayout,
showlegend: true,
legend: {
...window.plotlyDefaultLayout.legend,
xanchor: "center",
yanchor: "top",
y: -0.2,
x: 0.5,
orientation: "h",
},
};
}
// Initialize theme setting
updatePlotlyTheme();
// Listen for theme changes
const observer = new MutationObserver(function (mutations) {
mutations.forEach(function (mutation) {
if (mutation.attributeName === "data-bs-theme") {
console.log(
"Theme changed detected by observer:",
document.documentElement.getAttribute("data-bs-theme"),
);
updatePlotlyTheme();
// Use a small delay to ensure styles have been applied
setTimeout(refreshAllCharts, 100);
}
});
});
observer.observe(document.documentElement, { attributes: true });
// Chart responsiveness
function resizeCharts() {
const charts = document.querySelectorAll(".chart-container");
@ -20,6 +136,66 @@ document.addEventListener("DOMContentLoaded", function () {
});
}
// Refresh all charts with current theme
function refreshAllCharts() {
if (!window.Plotly) return;
const currentTheme = document.documentElement.getAttribute("data-bs-theme");
console.log("Refreshing charts with theme:", currentTheme);
// Update the theme settings
updatePlotlyTheme();
const charts = document.querySelectorAll(".chart-container");
charts.forEach(function (chart) {
if (chart.id) {
try {
// Safe way to check if element has a plot
const plotElement = document.getElementById(chart.id);
if (plotElement && plotElement._fullLayout) {
console.log("Updating chart theme for:", chart.id);
// Determine chart type to apply appropriate settings
let layoutUpdate = { ...window.plotlyDefaultLayout };
// Check if it's a bar chart
if (
plotElement.data &&
plotElement.data.some((trace) => trace.type === "bar")
) {
layoutUpdate = { ...window.plotlyBarConfig };
}
// Check if it's a pie chart
if (
plotElement.data &&
plotElement.data.some((trace) => trace.type === "pie")
) {
layoutUpdate = { ...window.plotlyPieConfig };
}
// Force paper and plot background colors based on current theme
// This ensures the chart background always matches the current theme
layoutUpdate.paper_bgcolor =
currentTheme === "dark" ? "#343a40" : "#ffffff";
layoutUpdate.plot_bgcolor = currentTheme === "dark" ? "#343a40" : "#ffffff";
// Update font colors too (copy the font object -- the spread above is shallow,
// so mutating layoutUpdate.font would also mutate the shared default layout)
layoutUpdate.font = {
...layoutUpdate.font,
color: currentTheme === "dark" ? "#f8f9fa" : "#212529",
};
// Apply layout updates
Plotly.relayout(chart.id, layoutUpdate);
}
} catch (e) {
console.error("Error updating chart theme:", e);
}
}
});
}
// Make refreshAllCharts available globally
window.refreshAllCharts = refreshAllCharts;
// Handle window resize
window.addEventListener("resize", function () {
if (window.Plotly) {
@ -27,6 +203,29 @@ document.addEventListener("DOMContentLoaded", function () {
}
});
// Call resizeCharts on initial load
if (window.Plotly) {
// Use a longer delay to ensure charts are fully loaded
setTimeout(function () {
updatePlotlyTheme();
refreshAllCharts();
}, 300);
}
// Apply theme to newly created charts (guard in case Plotly isn't loaded on this page)
if (window.Plotly) {
const originalPlotlyNewPlot = Plotly.newPlot;
Plotly.newPlot = function () {
const args = Array.from(arguments);
// Get the layout argument (3rd argument)
if (args.length >= 3 && typeof args[2] === "object") {
// Ensure plotlyDefaultLayout is up to date
updatePlotlyTheme();
// Apply current theme to new plot
args[2] = { ...window.plotlyDefaultLayout, ...args[2] };
}
return originalPlotlyNewPlot.apply(this, args);
};
}
// Time range filtering
const timeRangeDropdown = document.getElementById("timeRangeDropdown");
if (timeRangeDropdown) {
@ -157,11 +356,14 @@ document.addEventListener("DOMContentLoaded", function () {
},
],
{
...window.plotlyDefaultLayout,
margin: { t: 10, r: 10, b: 40, l: 40 },
xaxis: {
...window.plotlyDefaultLayout.xaxis,
title: "Date",
},
yaxis: {
...window.plotlyDefaultLayout.yaxis,
title: "Number of Sessions",
},
},
@ -204,6 +406,7 @@ document.addEventListener("DOMContentLoaded", function () {
},
],
{
...window.plotlyDefaultLayout,
margin: { t: 10, r: 10, b: 10, l: 10 },
},
);
@ -229,8 +432,10 @@ document.addEventListener("DOMContentLoaded", function () {
},
],
{
...window.plotlyDefaultLayout,
margin: { t: 10, r: 10, b: 40, l: 100 },
xaxis: {
...window.plotlyDefaultLayout.xaxis,
title: "Number of Sessions",
},
},
@ -255,6 +460,7 @@ document.addEventListener("DOMContentLoaded", function () {
},
],
{
...window.plotlyDefaultLayout,
margin: { t: 10, r: 10, b: 10, l: 10 },
},
);

View File

@ -148,5 +148,99 @@ document.addEventListener("DOMContentLoaded", function () {
}
}
window.addEventListener("resize", handleSidebarOnResize);
// Theme toggling functionality
function setTheme(theme, isUserPreference = false) {
console.log("Setting theme to:", theme, "User preference:", isUserPreference);
// Update the HTML attribute that controls theme
document.documentElement.setAttribute("data-bs-theme", theme);
// Save the theme preference to localStorage
localStorage.setItem("theme", theme);
// If this was a user choice (from the toggle button), record that fact
if (isUserPreference) {
localStorage.setItem("userPreferredTheme", "true");
}
// Update toggle button icon
const themeToggle = document.getElementById("theme-toggle");
if (themeToggle) {
const icon = themeToggle.querySelector("i");
if (theme === "dark") {
icon.classList.remove("fa-moon");
icon.classList.add("fa-sun");
themeToggle.setAttribute("title", "Switch to light mode");
themeToggle.setAttribute("aria-label", "Switch to light mode");
} else {
icon.classList.remove("fa-sun");
icon.classList.add("fa-moon");
themeToggle.setAttribute("title", "Switch to dark mode");
themeToggle.setAttribute("aria-label", "Switch to dark mode");
}
}
// If we're on a page with charts, refresh them to match the theme
if (typeof window.refreshAllCharts === "function") {
console.log("Calling refresh charts from theme toggle");
// Add a small delay to ensure DOM updates have completed
setTimeout(window.refreshAllCharts, 100);
}
}
// Check if the user has a system preference for dark mode
function getSystemPreference() {
return window.matchMedia("(prefers-color-scheme: dark)").matches ? "dark" : "light";
}
// Initialize theme based on saved preference or system setting
function initializeTheme() {
// Check if the user has explicitly set a preference
const hasUserPreference = localStorage.getItem("userPreferredTheme") === "true";
const savedTheme = localStorage.getItem("theme");
const systemTheme = getSystemPreference();
console.log("Theme initialization:", {
hasUserPreference,
savedTheme,
systemTheme,
});
// Use saved theme if it exists and was set by user
// Otherwise, use system preference
if (hasUserPreference && savedTheme) {
setTheme(savedTheme);
} else {
// No user preference, use system preference
setTheme(systemTheme);
// Clear any saved theme to ensure it uses system preference
localStorage.removeItem("userPreferredTheme");
}
}
// Initialize theme on page load
initializeTheme();
// Listen for system preference changes
const colorSchemeMediaQuery = window.matchMedia("(prefers-color-scheme: dark)");
colorSchemeMediaQuery.addEventListener("change", (e) => {
// Only update theme based on system if user hasn't set a preference
const hasUserPreference = localStorage.getItem("userPreferredTheme") === "true";
console.log("System preference changed. Following system?", !hasUserPreference);
if (!hasUserPreference) {
setTheme(e.matches ? "dark" : "light");
}
});
// Theme toggle button functionality
const themeToggle = document.getElementById("theme-toggle");
if (themeToggle) {
themeToggle.addEventListener("click", function () {
const currentTheme = document.documentElement.getAttribute("data-bs-theme") || "light";
const newTheme = currentTheme === "dark" ? "light" : "dark";
console.log("Manual theme toggle from", currentTheme, "to", newTheme);
setTheme(newTheme, true); // true indicates this is a user preference
});
}
});

View File

@ -71,6 +71,17 @@
</ul>
<div class="d-flex">
<!-- Theme Toggle Button -->
<button
id="theme-toggle"
class="btn btn-outline-light me-2"
type="button"
aria-label="Toggle theme"
title="Toggle light/dark mode"
>
<i class="fas fa-moon"></i>
</button>
{% if user.is_authenticated %}
<div class="dropdown">
<button
@ -116,7 +127,7 @@
<!-- Sidebar -->
<nav
id="sidebarMenu"
class="col-md-3 col-lg-2 d-md-block bg-light sidebar collapse sticky-top h-100 p-0"
class="col-md-3 col-lg-2 d-md-block sidebar collapse sticky-top h-100 p-0"
>
<div class="sidebar-sticky pt-3">
{% block sidebar %}

View File

@ -62,6 +62,19 @@
</li>
</ul>
</div>
{% if user.is_superuser %}
<form
method="post"
action="{% url 'data_integration:manual_data_refresh' %}"
class="ms-2 d-inline"
>
{% csrf_token %}
<button type="submit" class="btn btn-sm btn-outline-info">
<i class="fas fa-sync"></i> Refresh Data
</button>
</form>
{% endif %}
</div>
</div>

View File

@ -103,15 +103,31 @@
</div>
</div>
<!-- Export to CSV -->
<!-- Export to CSV - Collapsible Section -->
<div class="row mb-4">
<div class="col-12">
<div class="card">
<div class="card-header">
<div class="card-header d-flex justify-content-between align-items-center">
<h5 class="card-title mb-0">Export Data</h5>
<button
class="btn btn-sm btn-outline-secondary"
type="button"
data-bs-toggle="collapse"
data-bs-target="#exportCollapse"
aria-expanded="false"
aria-controls="exportCollapse"
>
<i class="fas fa-chevron-down"></i>
</button>
</div>
<div class="collapse" id="exportCollapse">
<div class="card-body">
<form id="export-form" method="get" action="{% url 'export_chats_csv' %}" class="row g-3">
<form
id="export-form"
method="get"
action="{% url 'export_chats_csv' %}"
class="row g-3"
>
<!-- Pass current filters to export -->
<input type="hidden" name="data_source_id" value="{{ selected_data_source.id }}" />
<input type="hidden" name="view" value="{{ view }}" />
@ -156,11 +172,21 @@
<i class="fas fa-file-csv me-1"></i> Export to CSV
</button>
</div>
<div class="col-md-3 d-flex align-items-end">
<button
type="submit"
class="btn btn-info w-100"
formaction="{% url 'export_chats_json' %}"
>
<i class="fas fa-file-code me-1"></i> Export to JSON
</button>
</div>
</form>
</div>
</div>
</div>
</div>
</div>
<!-- Data Table -->
<div class="row">

dev.sh Executable file
View File

@ -0,0 +1,124 @@
#!/bin/bash
# LiveGraphsDjango Development Helper Script
# Set UV_LINK_MODE to copy to avoid hardlink warnings
export UV_LINK_MODE=copy
# Function to print section header
print_header() {
echo "======================================"
echo "🚀 $1"
echo "======================================"
}
# Display help menu
if [[ $1 == "help" ]] || [[ $1 == "-h" ]] || [[ $1 == "--help" ]] || [[ -z $1 ]]; then
print_header "LiveGraphsDjango Development Commands"
echo "Usage: ./dev.sh COMMAND"
echo ""
echo "Available commands:"
echo " start - Start Redis and run the application with foreman"
echo " redis-start - Start Redis server in background"
echo " redis-test - Test Redis connection"
echo " redis-stop - Stop Redis server"
echo " migrate - Run database migrations"
echo " makemigrations - Create database migrations"
echo " superuser - Create a superuser account"
echo " test-celery - Send a test task to Celery"
echo " logs-celery - View logs for Celery"
echo " logs-beat - View logs for Celery Beat"
echo " shell - Open Django shell"
echo " help - Show this help menu"
exit 0
fi
# Start Redis server
if [[ $1 == "redis-start" ]]; then
print_header "Starting Redis Server"
redis-server --daemonize yes
sleep 1
if redis-cli ping >/dev/null 2>&1; then
echo "✅ Redis server is now running"
else
echo "❌ Failed to start Redis server"
fi
exit 0
fi
# Test Redis connection
if [[ $1 == "redis-test" ]]; then
print_header "Testing Redis Connection"
cd dashboard_project && python manage.py test_redis
exit 0
fi
# Stop Redis server
if [[ $1 == "redis-stop" ]]; then
print_header "Stopping Redis Server"
redis-cli shutdown
echo "✅ Redis server has been stopped"
exit 0
fi
# Run migrations
if [[ $1 == "migrate" ]]; then
print_header "Running Migrations"
cd dashboard_project && UV_LINK_MODE=copy uv run python manage.py migrate
exit 0
fi
# Make migrations
if [[ $1 == "makemigrations" ]]; then
print_header "Creating Migrations"
cd dashboard_project && UV_LINK_MODE=copy uv run python manage.py makemigrations
exit 0
fi
# Create superuser
if [[ $1 == "superuser" ]]; then
print_header "Creating Superuser"
cd dashboard_project && UV_LINK_MODE=copy uv run python manage.py createsuperuser
exit 0
fi
# Test Celery
if [[ $1 == "test-celery" ]]; then
print_header "Testing Celery"
cd dashboard_project && UV_LINK_MODE=copy uv run python manage.py test_celery
exit 0
fi
# View Celery logs
if [[ $1 == "logs-celery" ]]; then
print_header "Celery Worker Logs"
echo "Press Ctrl+C to exit"
cd dashboard_project && UV_LINK_MODE=copy uv run celery -A dashboard_project worker --loglevel=info
exit 0
fi
# View Celery Beat logs
if [[ $1 == "logs-beat" ]]; then
print_header "Celery Beat Logs"
echo "Press Ctrl+C to exit"
cd dashboard_project && UV_LINK_MODE=copy uv run celery -A dashboard_project beat --scheduler django_celery_beat.schedulers:DatabaseScheduler
exit 0
fi
# Django shell
if [[ $1 == "shell" ]]; then
print_header "Django Shell"
cd dashboard_project && UV_LINK_MODE=copy uv run python manage.py shell
exit 0
fi
# Start the application
if [[ $1 == "start" ]]; then
print_header "Starting LiveGraphsDjango Application"
./start.sh
exit 0
fi
# Invalid command
echo "❌ Unknown command: $1"
echo "Run './dev.sh help' to see available commands"
exit 1

View File

@ -11,14 +11,17 @@ services:
- static_volume:/app/staticfiles
- media_volume:/app/media
ports:
- "8000:8000"
- 8000:8000
environment:
- DEBUG=0
- SECRET_KEY=your_secret_key_here
- ALLOWED_HOSTS=localhost,127.0.0.1
- DJANGO_SETTINGS_MODULE=dashboard_project.settings
- CELERY_BROKER_URL=redis://redis:6379/0
- CELERY_RESULT_BACKEND=redis://redis:6379/0
depends_on:
- db
- redis
db:
image: postgres:13
@ -29,12 +32,52 @@ services:
- POSTGRES_PASSWORD=postgres
- POSTGRES_DB=dashboard_db
ports:
- "5432:5432"
- 5432:5432
redis:
image: redis:7-alpine
ports:
- 6379:6379
volumes:
- redis_data:/data
healthcheck:
test: [CMD, redis-cli, ping]
interval: 30s
timeout: 10s
retries: 3
celery:
build: .
command: celery -A dashboard_project worker --loglevel=info
volumes:
- .:/app
environment:
- DEBUG=0
- DJANGO_SETTINGS_MODULE=dashboard_project.settings
- CELERY_BROKER_URL=redis://redis:6379/0
- CELERY_RESULT_BACKEND=redis://redis:6379/0
depends_on:
- redis
- web
celery-beat:
build: .
command: celery -A dashboard_project beat --scheduler django_celery_beat.schedulers:DatabaseScheduler
volumes:
- .:/app
environment:
- DEBUG=0
- DJANGO_SETTINGS_MODULE=dashboard_project.settings
- CELERY_BROKER_URL=redis://redis:6379/0
- CELERY_RESULT_BACKEND=redis://redis:6379/0
depends_on:
- redis
- web
nginx:
image: nginx:latest
ports:
- "80:80"
- 80:80
volumes:
- ./nginx/conf.d:/etc/nginx/conf.d
- static_volume:/app/staticfiles
@ -44,5 +87,6 @@ services:
volumes:
postgres_data:
redis_data:
static_volume:
media_volume:

172
docs/CELERY_REDIS.md Normal file
View File

@ -0,0 +1,172 @@
# Redis and Celery Configuration
This document explains how to set up and use Redis and Celery for background task processing in the LiveGraphs application.
## Overview
The data integration module uses Celery to handle:
- Periodic data fetching from external APIs
- Processing and storing CSV data
- Downloading and parsing transcript files
- Manual data refresh triggered by users
## Installation
### Redis (Recommended)
Redis is the recommended message broker for Celery due to its performance and reliability:
#### Ubuntu/Debian
```bash
sudo apt update
sudo apt install redis-server
sudo systemctl start redis-server
sudo systemctl enable redis-server
# Verify that Redis is running
redis-cli ping # Should output PONG
```
After installation, check if Redis is properly configured:
1. Open Redis configuration file:
```bash
sudo nano /etc/redis/redis.conf
```
2. Ensure the following settings:
```bash
# For development (localhost only)
bind 127.0.0.1
# For production (accept connections from specific IP)
# bind 127.0.0.1 your.server.ip.address
# Protected mode (recommended)
protected-mode yes
# Port
port 6379
```
3. Restart Redis after any changes:
```bash
sudo systemctl restart redis-server
```
#### macOS
```bash
brew install redis
brew services start redis
```
#### Windows
Download and install from [microsoftarchive/redis](https://github.com/microsoftarchive/redis/releases). Note that this port is archived and no longer maintained; on Windows, running Redis under WSL2 is generally the more reliable option.
### SQLite Fallback
If Redis is not available, the application will automatically fall back to using SQLite for Celery tasks. This works well for development but is not recommended for production.
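A minimal sketch of how such a fallback can be wired up in `settings.py` (the exact wiring is assumed, not the project's actual code; the SQLite URLs mirror the ones exported by `start.sh`):
```python
import os

# Prefer Redis when configured; otherwise fall back to SQLite through
# the celery[sqlalchemy] extra. Assumed wiring, not the project's exact code.
CELERY_BROKER_URL = os.environ.get("CELERY_BROKER_URL", "sqla+sqlite:///celery.sqlite")
CELERY_RESULT_BACKEND = os.environ.get("CELERY_RESULT_BACKEND", "db+sqlite:///results.sqlite")
```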
## Configuration
### Environment Variables
Set these environment variables in your `.env` file or deployment environment:
```env
# Redis Configuration
REDIS_HOST=localhost
REDIS_PORT=6379
REDIS_DB=0
CELERY_BROKER_URL=redis://localhost:6379/0
CELERY_RESULT_BACKEND=redis://localhost:6379/0
# Task Scheduling
CHAT_DATA_FETCH_INTERVAL=3600 # In seconds (1 hour)
FETCH_DATA_TIMEOUT=300 # In seconds (5 minutes)
```
### Testing Redis Connection
To test if Redis is properly configured:
```bash
cd dashboard_project
python manage.py test_redis
```
### Testing Celery
To test if Celery is working correctly:
```bash
# Start a Celery worker in one terminal
make celery
# In another terminal, run the test task
cd dashboard_project
python manage.py test_celery
```
## Running with Docker
The included `docker-compose.yml` file sets up Redis, a Celery worker, and Celery beat for you:
```bash
docker-compose up -d
```
## Running in Development
Development requires multiple terminal windows:
1. **Django Development Server**:
```bash
make run
```
2. **Redis Server** (if needed):
```bash
make run-redis
```
3. **Celery Worker**:
```bash
make celery
```
4. **Celery Beat** (for scheduled tasks):
```bash
make celery-beat
```
Or use the combined command:
```bash
make run-all
```
## Common Issues
### Redis Connection Failures
If you see connection errors:
1. Check that Redis is running: `redis-cli ping` should return `PONG`
2. Verify firewall settings are not blocking port 6379
3. Check Redis binding in `/etc/redis/redis.conf` (should be `bind 127.0.0.1` for local dev)
### Celery Workers Not Processing Tasks
1. Ensure the worker is running with the correct app name: `celery -A dashboard_project worker`
2. Check the Celery logs for errors
3. Verify that the broker URL settings match in both code and environment variables; a quick check is sketched below
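A quick way to see which broker URL Celery actually resolved, from `python manage.py shell` (the module path is assumed from the `-A dashboard_project` invocation used above):
```python
# Assumes the conventional layout: app = Celery(...) in dashboard_project/celery.py
from dashboard_project.celery import app

print(app.conf.broker_url)      # what the worker will connect to
print(app.conf.result_backend)
```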

142
docs/TROUBLESHOOTING.md Normal file
View File

@ -0,0 +1,142 @@
# Redis and Celery Troubleshooting Guide
This guide provides detailed steps to diagnose and fix issues with Redis and Celery in the LiveGraphs project.
## Diagnosing Redis Connection Issues
### Check if Redis is Running
```bash
# Check Redis server status
sudo systemctl status redis-server
# Try to ping Redis
redis-cli ping # Should return PONG
```
### Test Redis Connectivity
Use our built-in test tool:
```bash
cd dashboard_project
python manage.py test_redis
```
If this fails, check the following:
1. Redis might not be running. Start it with:
```bash
sudo systemctl start redis-server
```
2. Connection credentials may be incorrect. Check your environment variables:
```bash
echo $REDIS_URL
echo $CELERY_BROKER_URL
echo $CELERY_RESULT_BACKEND
```
3. Redis might be binding only to a specific interface. Check `/etc/redis/redis.conf`:
```bash
grep "bind" /etc/redis/redis.conf
```
4. Firewall rules might be blocking Redis. If you're connecting remotely:
```bash
sudo ufw status # Check if firewall is enabled
sudo ufw allow 6379/tcp # Allow Redis port if needed
```
## Fixing CSV Data Processing Issues
If you see the error `zip() argument 2 is shorter than argument 1`, it means a data row contains fewer fields than the header row defines. We've implemented a fix that:
1. Pads shorter rows with empty strings
2. Uses more flexible date format parsing
3. Provides better error handling
After these changes, your data should be processed correctly regardless of format variations; the padding idea is sketched below.
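A sketch of the padding approach (function and variable names are illustrative, not the project's actual helpers):
```python
import csv
from io import StringIO

def rows_as_dicts(csv_text: str):
    """Pair each row with the headers, padding short rows with empty strings."""
    reader = csv.reader(StringIO(csv_text))
    headers = next(reader)
    for row in reader:
        row = row + [""] * (len(headers) - len(row))  # avoid zip() truncation
        yield dict(zip(headers, row))
```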
## Testing Celery Tasks
To verify if your Celery configuration is working:
```bash
# Start a Celery worker in one terminal
cd dashboard_project
celery -A dashboard_project worker --loglevel=info
# In another terminal, run the test task
cd dashboard_project
python manage.py test_celery
```
If the task isn't completing, check:
1. Look for errors in the Celery worker terminal
2. Verify broker URL settings match in both terminals:
```bash
echo $CELERY_BROKER_URL
```
3. Check if Redis is accessible from both terminals:
```bash
redis-cli ping
```
## Checking Scheduled Tasks
To verify that scheduled tasks are configured correctly:
```bash
# Inspect scheduled tasks (requires a running worker)
cd dashboard_project
celery -A dashboard_project inspect scheduled
```
Common issues with scheduled tasks:
1. **Celery Beat not running**: Start it with:
```bash
cd dashboard_project
celery -A dashboard_project beat --scheduler django_celery_beat.schedulers:DatabaseScheduler
```
2. **Task registered but not running**: Check worker logs for any errors
3. **Wrong schedule**: Check the interval in `CELERY_BEAT_SCHEDULE` in settings.py, or the PeriodicTask entries managed by django-celery-beat; a minimal entry is sketched below
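For reference, a minimal static schedule entry might look like the following (the dotted task path is an assumption; match it to the task registered in `data_integration/tasks.py`). With the DatabaseScheduler, entries like this are synced into django-celery-beat's PeriodicTask table when beat starts:
```python
CELERY_BEAT_SCHEDULE = {
    "periodic-fetch-chat-data": {
        "task": "data_integration.tasks.periodic_fetch_chat_data",  # path assumed
        "schedule": 3600.0,  # seconds; mirrors CHAT_DATA_FETCH_INTERVAL
    },
}
```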
## Data Source Configuration
If data sources aren't being processed correctly:
1. Verify active data sources exist:
```bash
cd dashboard_project
python manage.py shell -c "from data_integration.models import ExternalDataSource; print(ExternalDataSource.objects.filter(is_active=True).count())"
```
2. Create a default data source if needed:
```bash
cd dashboard_project
python manage.py create_default_datasource
```
3. Check source URLs and credentials in the admin interface or environment variables.
## Manually Triggering Data Refresh
To manually trigger a data refresh for testing:
```bash
cd dashboard_project
python manage.py shell -c "from data_integration.tasks import periodic_fetch_chat_data; periodic_fetch_chat_data()"
```
This will execute the task directly without going through Celery, which is useful for debugging.
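To route the same task through a running worker instead (a sketch; assumes the function is registered as a Celery task, e.g. via `@shared_task`):
```python
from data_integration.tasks import periodic_fetch_chat_data

# Synchronous call in the current process -- handy for debugging.
periodic_fetch_chat_data()

# Queue it for a worker instead; returns an AsyncResult you can poll.
result = periodic_fetch_chat_data.delay()
print(result.id, result.status)
```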

View File

@ -17,15 +17,22 @@ classifiers = [
]
dependencies = [
"bleach[css]>=6.2.0",
"celery[sqlalchemy]>=5.5.2",
"crispy-bootstrap5>=2025.4",
"django>=5.2.1",
"django-allauth>=65.8.0",
"django-celery-beat>=2.8.1",
"django-crispy-forms>=2.4",
"gunicorn>=23.0.0",
"numpy>=2.2.5",
"pandas>=2.2.3",
"plotly>=6.1.0",
"python-dotenv>=1.1.0",
"redis>=6.1.0",
"requests>=2.32.3",
"sqlalchemy>=2.0.41",
"tinycss2>=1.4.0",
"whitenoise>=6.9.0",
]
@ -103,7 +110,15 @@ indent-style = "space"
line-ending = "lf"
[tool.bandit]
exclude_dirs = ["tests", "venv", ".venv", ".git", "__pycache__", "migrations", "**/create_sample_data.py"]
exclude_dirs = [
"tests",
"venv",
".venv",
".git",
"__pycache__",
"migrations",
"**/create_sample_data.py",
]
skips = ["B101"]
targets = ["dashboard_project"]

View File

@ -1,6 +1,10 @@
# This file was autogenerated by uv via the following command:
# uv export --frozen --output-file=requirements.txt
-e .
amqp==5.3.1 \
--hash=sha256:43b3319e1b4e7d1251833a93d672b4af1e40f3d632d479b98661a95f117880a2 \
--hash=sha256:cddc00c725449522023bad949f70fff7b48f0b1ade74d170a6f10ab044739432
# via kombu
asgiref==3.8.1 \
--hash=sha256:3e1e3ecc849832fe52ccf2cb6686b7a55f82bb1d6aee72a58826471390335e47 \
--hash=sha256:c343bd80a0bec947a9860adb4c432ffa7db769836c64238fc34bdc3fec84d590
@ -11,6 +15,10 @@ asgiref==3.8.1 \
bandit==1.8.3 \
--hash=sha256:28f04dc0d258e1dd0f99dee8eefa13d1cb5e3fde1a5ab0c523971f97b289bcd8 \
--hash=sha256:f5847beb654d309422985c36644649924e0ea4425c76dec2e89110b87506193a
billiard==4.2.1 \
--hash=sha256:12b641b0c539073fc8d3f5b8b7be998956665c4233c7c1fcd66a7e677c4fb36f \
--hash=sha256:40b59a4ac8806ba2c2369ea98d876bc6108b051c227baffd928c644d15d8f3cb
# via celery
black==25.1.0 \
--hash=sha256:030b9759066a4ee5e5aca28c3c77f9c64789cdd4de8ac1df642c40b708be6171 \
--hash=sha256:33496d5cd1222ad73391352b4ae8da15253c5de89b93a80b3e2c8d9a19ec2666 \
@ -18,14 +26,62 @@ black==25.1.0 \
--hash=sha256:95e8176dae143ba9097f351d174fdaf0ccd29efb414b362ae3fd72bf0f710717 \
--hash=sha256:a22f402b410566e2d1c950708c77ebf5ebd5d0d88a6a2e87c86d9fb48afa0d18 \
--hash=sha256:afebb7098bfbc70037a053b91ae8437c3857482d3a690fefc03e9ff7aa9a5fd3
bleach==6.2.0 \
--hash=sha256:117d9c6097a7c3d22fd578fcd8d35ff1e125df6736f554da4e432fdd63f31e5e \
--hash=sha256:123e894118b8a599fd80d3ec1a6d4cc7ce4e5882b1317a7e1ba69b56e95f991f
# via livegraphsdjango
celery==5.5.2 \
--hash=sha256:4d6930f354f9d29295425d7a37261245c74a32807c45d764bedc286afd0e724e \
--hash=sha256:54425a067afdc88b57cd8d94ed4af2ffaf13ab8c7680041ac2c4ac44357bdf4c
# via
# django-celery-beat
# livegraphsdjango
certifi==2025.4.26 \
--hash=sha256:0a816057ea3cdefcef70270d2c515e4506bbc954f417fa5ade2021213bb8f0c6 \
--hash=sha256:30350364dfe371162649852c63336a15c70c6510c2ad5015b21c2345311805f3
# via requests
cfgv==3.4.0 \
--hash=sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9 \
--hash=sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560
# via pre-commit
charset-normalizer==3.4.2 \
--hash=sha256:1c95a1e2902a8b722868587c0e1184ad5c55631de5afc0eb96bc4b0d738092c0 \
--hash=sha256:289200a18fa698949d2b39c671c2cc7a24d44096784e76614899a7ccf2574b7b \
--hash=sha256:32fc0341d72e0f73f80acb0a2c94216bd704f4f0bce10aedea38f30502b271ff \
--hash=sha256:3fddb7e2c84ac87ac3a947cb4e66d143ca5863ef48e4a5ecb83bd48619e4634e \
--hash=sha256:4a476b06fbcf359ad25d34a057b7219281286ae2477cc5ff5e3f70a246971148 \
--hash=sha256:5baececa9ecba31eff645232d59845c07aa030f0c81ee70184a90d35099a0e63 \
--hash=sha256:6c9379d65defcab82d07b2a9dfbfc2e95bc8fe0ebb1b176a3190230a3ef0e07c \
--hash=sha256:7f56930ab0abd1c45cd15be65cc741c28b1c9a34876ce8c17a2fa107810c0af0 \
--hash=sha256:926ca93accd5d36ccdabd803392ddc3e03e6d4cd1cf17deff3b989ab8e9dbcf0 \
--hash=sha256:98f862da73774290f251b9df8d11161b6cf25b599a66baf087c1ffe340e9bfd1 \
--hash=sha256:aa6af9e7d59f9c12b33ae4e9450619cf2488e2bbe9b44030905877f0b2324980 \
--hash=sha256:aaeeb6a479c7667fbe1099af9617c83aaca22182d6cf8c53966491a0f1b7ffb7 \
--hash=sha256:e635b87f01ebc977342e2697d05b56632f5f879a4f15955dfe8cef2448b51691 \
--hash=sha256:eba9904b0f38a143592d9fc0e19e2df0fa2e41c3c3745554761c5f6447eedabf \
--hash=sha256:ef8de666d6179b009dce7bcb2ad4c4a779f113f12caf8dc77f0162c29d20490b
# via requests
click==8.2.0 \
--hash=sha256:6b303f0b2aa85f1cb4e5303078fadcbcd4e476f114fab9b5007005711839325c \
--hash=sha256:f5452aeddd9988eefa20f90f05ab66f17fce1ee2a36907fd30b05bbb5953814d
# via black
# via
# black
# celery
# click-didyoumean
# click-plugins
# click-repl
click-didyoumean==0.3.1 \
--hash=sha256:4f82fdff0dbe64ef8ab2279bd6aa3f6a99c3b28c05aa09cbfc07c9d7fbb5a463 \
--hash=sha256:5c4bb6007cfea5f2fd6583a2fb6701a22a41eb98957e63d0fac41c10e7c3117c
# via celery
click-plugins==1.1.1 \
--hash=sha256:46ab999744a9d831159c3411bb0c79346d94a444df9a3a3742e9ed63645f264b \
--hash=sha256:5d262006d3222f5057fd81e1623d4443e41dcda5dc815c06b442aa3c02889fc8
# via celery
click-repl==0.3.0 \
--hash=sha256:17849c23dba3d667247dc4defe1757fff98694e90fe37474f3feebb69ced26a9 \
--hash=sha256:fb7e06deb8da8de86180a33a9da97ac316751c094c6899382da7feeeeb51b812
# via celery
colorama==0.4.6 ; sys_platform == 'win32' \
--hash=sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44 \
--hash=sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6
@ -60,6 +116,10 @@ crispy-bootstrap5==2025.4 \
--hash=sha256:51efa19c7d40e339774a6fe23407e83b95b7634cad6de70fd1f1093131bea1d9 \
--hash=sha256:d675ea7e245048905077dfe16bf1fa1ee16842f52fe88164ccc8a5e2d11119b3
# via livegraphsdjango
cron-descriptor==1.4.5 \
--hash=sha256:736b3ae9d1a99bc3dbfc5b55b5e6e7c12031e7ba5de716625772f8b02dcd6013 \
--hash=sha256:f51ce4ffc1d1f2816939add8524f206c376a42c87a5fca3091ce26725b3b1bca
# via django-celery-beat
distlib==0.3.9 \
--hash=sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87 \
--hash=sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403
@ -70,14 +130,20 @@ django==5.2.1 \
# via
# crispy-bootstrap5
# django-allauth
# django-celery-beat
# django-crispy-forms
# django-debug-toolbar
# django-stubs
# django-stubs-ext
# django-timezone-field
# livegraphsdjango
django-allauth==65.8.0 \
--hash=sha256:9da589d99d412740629333a01865a90c95c97e0fae0cde789aa45a8fda90e83b
# via livegraphsdjango
django-celery-beat==2.8.1 \
--hash=sha256:da2b1c6939495c05a551717509d6e3b79444e114a027f7b77bf3727c2a39d171 \
--hash=sha256:dfad0201c0ac50c91a34700ef8fa0a10ee098cc7f3375fe5debed79f2204f80a
# via livegraphsdjango
django-crispy-forms==2.4 \
--hash=sha256:5a4b99876cfb1bdd3e47727731b6d4197c51c0da502befbfbec6a93010b02030 \
--hash=sha256:915e1ffdeb2987d78b33fabfeff8e5203c8776aa910a3a659a2c514ca125f3bd
@ -94,10 +160,34 @@ django-stubs-ext==5.2.0 \
--hash=sha256:00c4ae307b538f5643af761a914c3f8e4e3f25f4e7c6d7098f1906c0d8f2aac9 \
--hash=sha256:b27ae0aab970af4894ba4e9b3fcd3e03421dc8731516669659ee56122d148b23
# via django-stubs
django-timezone-field==7.1 \
--hash=sha256:93914713ed882f5bccda080eda388f7006349f25930b6122e9b07bf8db49c4b4 \
--hash=sha256:b3ef409d88a2718b566fabe10ea996f2838bc72b22d3a2900c0aa905c761380c
# via django-celery-beat
filelock==3.18.0 \
--hash=sha256:adbc88eabb99d2fec8c9c1b229b171f18afa655400173ddc653d5d01501fb9f2 \
--hash=sha256:c401f4f8377c4464e6db25fff06205fd89bdd83b65eb0488ed1b160f780e21de
# via virtualenv
greenlet==3.2.2 ; (python_full_version < '3.14' and platform_machine == 'AMD64') or (python_full_version < '3.14' and platform_machine == 'WIN32') or (python_full_version < '3.14' and platform_machine == 'aarch64') or (python_full_version < '3.14' and platform_machine == 'amd64') or (python_full_version < '3.14' and platform_machine == 'ppc64le') or (python_full_version < '3.14' and platform_machine == 'win32') or (python_full_version < '3.14' and platform_machine == 'x86_64') \
--hash=sha256:02a98600899ca1ca5d3a2590974c9e3ec259503b2d6ba6527605fcd74e08e207 \
--hash=sha256:055916fafad3e3388d27dd68517478933a97edc2fc54ae79d3bec827de2c64c4 \
--hash=sha256:1919cbdc1c53ef739c94cf2985056bcc0838c1f217b57647cbf4578576c63825 \
--hash=sha256:1e76106b6fc55fa3d6fe1c527f95ee65e324a13b62e243f77b48317346559708 \
--hash=sha256:2593283bf81ca37d27d110956b79e8723f9aa50c4bcdc29d3c0543d4743d2763 \
--hash=sha256:2dc5c43bb65ec3669452af0ab10729e8fdc17f87a1f2ad7ec65d4aaaefabf6bf \
--hash=sha256:3885f85b61798f4192d544aac7b25a04ece5fe2704670b4ab73c2d2c14ab740d \
--hash=sha256:3ab7194ee290302ca15449f601036007873028712e92ca15fc76597a0aeb4c59 \
--hash=sha256:45f9f4853fb4cc46783085261c9ec4706628f3b57de3e68bae03e8f8b3c0de51 \
--hash=sha256:6fadd183186db360b61cb34e81117a096bff91c072929cd1b529eb20dd46e6c5 \
--hash=sha256:85f3e248507125bf4af607a26fd6cb8578776197bd4b66e35229cdf5acf1dfbf \
--hash=sha256:89c69e9a10670eb7a66b8cef6354c24671ba241f46152dd3eed447f79c29fb5b \
--hash=sha256:9ea5231428af34226c05f927e16fc7f6fa5e39e3ad3cd24ffa48ba53a47f4240 \
--hash=sha256:ad053d34421a2debba45aa3cc39acf454acbcd025b3fc1a9f8a0dee237abd485 \
--hash=sha256:b50a8c5c162469c3209e5ec92ee4f95c8231b11db6a04db09bbe338176723bb8 \
--hash=sha256:ba30e88607fb6990544d84caf3c706c4b48f629e18853fc6a646f82db9629418 \
--hash=sha256:decb0658ec19e5c1f519faa9a160c0fc85a41a7e6654b3ce1b44b939f8bf1325 \
--hash=sha256:fe46d4f8e94e637634d54477b0cfabcf93c53f29eedcbdeecaf2af32029b4421
# via sqlalchemy
gunicorn==23.0.0 \
--hash=sha256:ec400d38950de4dfd418cff8328b2c8faed0edb0d517d3394e457c317908ca4d \
--hash=sha256:f014447a0101dc57e294f6c18ca6b40227a4c90e9bdb586042628030cba004ec
@ -106,10 +196,18 @@ identify==2.6.10 \
--hash=sha256:45e92fd704f3da71cc3880036633f48b4b7265fd4de2b57627cb157216eb7eb8 \
--hash=sha256:5f34248f54136beed1a7ba6a6b5c4b6cf21ff495aac7c359e1ef831ae3b8ab25
# via pre-commit
idna==3.10 \
--hash=sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9 \
--hash=sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3
# via requests
iniconfig==2.1.0 \
--hash=sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7 \
--hash=sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760
# via pytest
kombu==5.5.3 \
--hash=sha256:021a0e11fcfcd9b0260ef1fb64088c0e92beb976eb59c1dfca7ddd4ad4562ea2 \
--hash=sha256:5b0dbceb4edee50aa464f59469d34b97864be09111338cfb224a10b6a163909b
# via celery
markdown-it-py==3.0.0 \
--hash=sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1 \
--hash=sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb
@ -215,6 +313,10 @@ pluggy==1.6.0 \
pre-commit==4.2.0 \
--hash=sha256:601283b9757afd87d40c4c4a9b2b5de9637a8ea02eaff7adc2d0fb4e04841146 \
--hash=sha256:a009ca7205f1eb497d10b845e52c838a98b6cdd2102a6c8e4540e94ee75c58bd
prompt-toolkit==3.0.51 \
--hash=sha256:52742911fde84e2d423e2f9a4cf1de7d7ac4e51958f648d9540e0fb8db077b07 \
--hash=sha256:931a162e3b27fc90c86f1b48bb1fb2c528c2761475e57c9c06de13311c7b54ed
# via click-repl
pygments==2.19.1 \
--hash=sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f \
--hash=sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c
@ -226,10 +328,17 @@ pytest==8.3.5 \
pytest-django==4.11.1 \
--hash=sha256:1b63773f648aa3d8541000c26929c1ea63934be1cfa674c76436966d73fe6a10 \
--hash=sha256:a949141a1ee103cb0e7a20f1451d355f83f5e4a5d07bdd4dcfdd1fd0ff227991
python-crontab==3.2.0 \
--hash=sha256:40067d1dd39ade3460b2ad8557c7651514cd3851deffff61c5c60e1227c5c36b \
--hash=sha256:82cb9b6a312d41ff66fd3caf3eed7115c28c195bfb50711bc2b4b9592feb9fe5
# via django-celery-beat
python-dateutil==2.9.0.post0 \
--hash=sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3 \
--hash=sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427
# via pandas
# via
# celery
# pandas
# python-crontab
python-dotenv==1.1.0 \
--hash=sha256:41f90bc6f5f177fb41f53e87666db362025010eb28f60a01c9143bfa33a2b2d5 \
--hash=sha256:d7c01d9e2293916c18baf562d95698754b0dbbb5e74d457c45d4f6561fb9d55d
@ -252,6 +361,14 @@ pyyaml==6.0.2 \
# via
# bandit
# pre-commit
redis==6.1.0 \
--hash=sha256:3b72622f3d3a89df2a6041e82acd896b0e67d9f54e9bcd906d091d23ba5219f6 \
--hash=sha256:c928e267ad69d3069af28a9823a07726edf72c7e37764f43dc0123f37928c075
# via livegraphsdjango
requests==2.32.3 \
--hash=sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760 \
--hash=sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6
# via livegraphsdjango
rich==14.0.0 \
--hash=sha256:1c9491e1951aac09caffd42f448ee3d04e58923ffe14993f6e83068dc395d7e0 \
--hash=sha256:82f1bc23a6a21ebca4ae0c45af9bdbc492ed20231dcb63f297d6d1021a9d5725
@ -283,6 +400,20 @@ six==1.17.0 \
--hash=sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274 \
--hash=sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81
# via python-dateutil
sqlalchemy==2.0.41 \
--hash=sha256:4eeb195cdedaf17aab6b247894ff2734dcead6c08f748e617bfe05bd5a218443 \
--hash=sha256:4f67766965996e63bb46cfbf2ce5355fc32d9dd3b8ad7e536a920ff9ee422e23 \
--hash=sha256:57df5dc6fdb5ed1a88a1ed2195fd31927e705cad62dedd86b46972752a80f576 \
--hash=sha256:82ca366a844eb551daff9d2e6e7a9e5e76d2612c8564f58db6c19a726869c1df \
--hash=sha256:a62448526dd9ed3e3beedc93df9bb6b55a436ed1474db31a2af13b313a70a7e1 \
--hash=sha256:bfc9064f6658a3d1cadeaa0ba07570b83ce6801a1314985bf98ec9b95d74e15f \
--hash=sha256:c153265408d18de4cc5ded1941dcd8315894572cddd3c58df5d5b5705b3fa28d \
--hash=sha256:d4ae769b9c1c7757e4ccce94b0641bc203bbdf43ba7a2413ab2523d8d047d8dc \
--hash=sha256:dc56c9788617b8964ad02e8fcfeed4001c1f8ba91a9e1f31483c0dffb207002a \
--hash=sha256:edba70118c4be3c2b1f90754d308d0b79c6fe2c0fdc52d8ddf603916f83f4db9
# via
# celery
# livegraphsdjango
sqlparse==0.5.3 \
--hash=sha256:09f67787f56a0b16ecdbde1bfc7f5d9c3371ca683cfeaa8e6ff60b4807ec9272 \
--hash=sha256:cf2196ed3418f3ba5de6af7e82c694a9fbdbfecccdfc72e281548517081f16ca
@ -293,6 +424,12 @@ stevedore==5.4.1 \
--hash=sha256:3135b5ae50fe12816ef291baff420acb727fcd356106e3e9cbfa9e5985cd6f4b \
--hash=sha256:d10a31c7b86cba16c1f6e8d15416955fc797052351a56af15e608ad20811fcfe
# via bandit
tinycss2==1.4.0 \
--hash=sha256:10c0972f6fc0fbee87c3edb76549357415e94548c1ae10ebccdea16fb404a9b7 \
--hash=sha256:3a49cf47b7675da0b15d0c6e1df8df4ebd96e9394bb905a5775adb0d884c5289
# via
# bleach
# livegraphsdjango
types-pyyaml==6.0.12.20250516 \
--hash=sha256:8478208feaeb53a34cb5d970c56a7cd76b72659442e733e268a94dc72b2d0530 \
--hash=sha256:9f21a70216fc0fa1b216a8176db5f9e0af6eb35d2f2932acb87689d03a5bf6ba
@ -304,16 +441,40 @@ typing-extensions==4.13.2 \
# django-stubs
# django-stubs-ext
# mypy
# sqlalchemy
tzdata==2025.2 \
--hash=sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8 \
--hash=sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9
# via
# django
# django-celery-beat
# kombu
# pandas
urllib3==2.4.0 \
--hash=sha256:414bc6535b787febd7567804cc015fee39daab8ad86268f1310a9250697de466 \
--hash=sha256:4e16665048960a0900c702d4a66415956a584919c03361cac9f1df5c5dd7e813
# via requests
vine==5.1.0 \
--hash=sha256:40fdf3c48b2cfe1c38a49e9ae2da6fda88e4794c810050a728bd7413811fb1dc \
--hash=sha256:8b62e981d35c41049211cf62a0a1242d8c1ee9bd15bb196ce38aefd6799e61e0
# via
# amqp
# celery
# kombu
virtualenv==20.31.2 \
--hash=sha256:36efd0d9650ee985f0cad72065001e66d49a6f24eb44d98980f630686243cf11 \
--hash=sha256:e10c0a9d02835e592521be48b332b6caee6887f332c111aa79a09b9e79efc2af
# via pre-commit
wcwidth==0.2.13 \
--hash=sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859 \
--hash=sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5
# via prompt-toolkit
webencodings==0.5.1 \
--hash=sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78 \
--hash=sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923
# via
# bleach
# tinycss2
whitenoise==6.9.0 \
--hash=sha256:8c4a7c9d384694990c26f3047e118c691557481d624f069b7f7752a2f735d609 \
--hash=sha256:c8a489049b7ee9889617bb4c274a153f3d979e8f51d2efd0f5b403caf41c57df

36
start.sh Executable file
View File

@ -0,0 +1,36 @@
#!/bin/bash
# Set UV_LINK_MODE to copy to avoid hardlink warnings
export UV_LINK_MODE=copy
# Check if Redis is running
if ! redis-cli ping >/dev/null 2>&1; then
echo "Starting Redis server..."
redis-server --daemonize yes
sleep 1
# Verify Redis is now running
if redis-cli ping >/dev/null 2>&1; then
echo "✅ Redis server is now running"
else
echo "❌ Failed to start Redis server"
echo "Falling back to SQLite for Celery"
export CELERY_BROKER_URL=sqla+sqlite:///dashboard_project/celery.sqlite
export CELERY_RESULT_BACKEND=db+sqlite:///dashboard_project/results.sqlite
fi
else
echo "✅ Redis server is already running"
fi
# Set environment variables for Redis if it's running
if redis-cli ping >/dev/null 2>&1; then
export CELERY_BROKER_URL=redis://localhost:6379/0
export CELERY_RESULT_BACKEND=redis://localhost:6379/0
echo "Using Redis for Celery broker and result backend"
else
export CELERY_BROKER_URL=sqla+sqlite:///dashboard_project/celery.sqlite
export CELERY_RESULT_BACKEND=db+sqlite:///dashboard_project/results.sqlite
echo "Using SQLite for Celery broker and result backend"
fi
# Start the application using foreman
foreman start

347
uv.lock generated
View File

@ -2,6 +2,18 @@ version = 1
revision = 2
requires-python = ">=3.13"
[[package]]
name = "amqp"
version = "5.3.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "vine" },
]
sdist = { url = "https://files.pythonhosted.org/packages/79/fc/ec94a357dfc6683d8c86f8b4cfa5416a4c36b28052ec8260c77aca96a443/amqp-5.3.1.tar.gz", hash = "sha256:cddc00c725449522023bad949f70fff7b48f0b1ade74d170a6f10ab044739432", size = 129013, upload-time = "2024-11-12T19:55:44.051Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/26/99/fc813cd978842c26c82534010ea849eee9ab3a13ea2b74e95cb9c99e747b/amqp-5.3.1-py3-none-any.whl", hash = "sha256:43b3319e1b4e7d1251833a93d672b4af1e40f3d632d479b98661a95f117880a2", size = 50944, upload-time = "2024-11-12T19:55:41.782Z" },
]
[[package]]
name = "asgiref"
version = "3.8.1"
@ -26,6 +38,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/88/85/db74b9233e0aa27ec96891045c5e920a64dd5cbccd50f8e64e9460f48d35/bandit-1.8.3-py3-none-any.whl", hash = "sha256:28f04dc0d258e1dd0f99dee8eefa13d1cb5e3fde1a5ab0c523971f97b289bcd8", size = 129078, upload-time = "2025-02-17T05:24:54.068Z" },
]
[[package]]
name = "billiard"
version = "4.2.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/7c/58/1546c970afcd2a2428b1bfafecf2371d8951cc34b46701bea73f4280989e/billiard-4.2.1.tar.gz", hash = "sha256:12b641b0c539073fc8d3f5b8b7be998956665c4233c7c1fcd66a7e677c4fb36f", size = 155031, upload-time = "2024-09-21T13:40:22.491Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/30/da/43b15f28fe5f9e027b41c539abc5469052e9d48fd75f8ff094ba2a0ae767/billiard-4.2.1-py3-none-any.whl", hash = "sha256:40b59a4ac8806ba2c2369ea98d876bc6108b051c227baffd928c644d15d8f3cb", size = 86766, upload-time = "2024-09-21T13:40:20.188Z" },
]
[[package]]
name = "black"
version = "25.1.0"
@ -46,6 +67,56 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/09/71/54e999902aed72baf26bca0d50781b01838251a462612966e9fc4891eadd/black-25.1.0-py3-none-any.whl", hash = "sha256:95e8176dae143ba9097f351d174fdaf0ccd29efb414b362ae3fd72bf0f710717", size = 207646, upload-time = "2025-01-29T04:15:38.082Z" },
]
[[package]]
name = "bleach"
version = "6.2.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "webencodings" },
]
sdist = { url = "https://files.pythonhosted.org/packages/76/9a/0e33f5054c54d349ea62c277191c020c2d6ef1d65ab2cb1993f91ec846d1/bleach-6.2.0.tar.gz", hash = "sha256:123e894118b8a599fd80d3ec1a6d4cc7ce4e5882b1317a7e1ba69b56e95f991f", size = 203083, upload-time = "2024-10-29T18:30:40.477Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/fc/55/96142937f66150805c25c4d0f31ee4132fd33497753400734f9dfdcbdc66/bleach-6.2.0-py3-none-any.whl", hash = "sha256:117d9c6097a7c3d22fd578fcd8d35ff1e125df6736f554da4e432fdd63f31e5e", size = 163406, upload-time = "2024-10-29T18:30:38.186Z" },
]
[package.optional-dependencies]
css = [
{ name = "tinycss2" },
]
[[package]]
name = "celery"
version = "5.5.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "billiard" },
{ name = "click" },
{ name = "click-didyoumean" },
{ name = "click-plugins" },
{ name = "click-repl" },
{ name = "kombu" },
{ name = "python-dateutil" },
{ name = "vine" },
]
sdist = { url = "https://files.pythonhosted.org/packages/bf/03/5d9c6c449248958f1a5870e633a29d7419ff3724c452a98ffd22688a1a6a/celery-5.5.2.tar.gz", hash = "sha256:4d6930f354f9d29295425d7a37261245c74a32807c45d764bedc286afd0e724e", size = 1666892, upload-time = "2025-04-25T20:10:04.695Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/04/94/8e825ac1cf59d45d20c4345d4461e6b5263ae475f708d047c3dad0ac6401/celery-5.5.2-py3-none-any.whl", hash = "sha256:54425a067afdc88b57cd8d94ed4af2ffaf13ab8c7680041ac2c4ac44357bdf4c", size = 438626, upload-time = "2025-04-25T20:10:01.383Z" },
]
[package.optional-dependencies]
sqlalchemy = [
{ name = "sqlalchemy" },
]
[[package]]
name = "certifi"
version = "2025.4.26"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/e8/9e/c05b3920a3b7d20d3d3310465f50348e5b3694f4f88c6daf736eef3024c4/certifi-2025.4.26.tar.gz", hash = "sha256:0a816057ea3cdefcef70270d2c515e4506bbc954f417fa5ade2021213bb8f0c6", size = 160705, upload-time = "2025-04-26T02:12:29.51Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/4a/7e/3db2bd1b1f9e95f7cddca6d6e75e2f2bd9f51b1246e546d88addca0106bd/certifi-2025.4.26-py3-none-any.whl", hash = "sha256:30350364dfe371162649852c63336a15c70c6510c2ad5015b21c2345311805f3", size = 159618, upload-time = "2025-04-26T02:12:27.662Z" },
]
[[package]]
name = "cfgv"
version = "3.4.0"
@ -55,6 +126,28 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/c5/55/51844dd50c4fc7a33b653bfaba4c2456f06955289ca770a5dbd5fd267374/cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9", size = 7249, upload-time = "2023-08-12T20:38:16.269Z" },
]
[[package]]
name = "charset-normalizer"
version = "3.4.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/e4/33/89c2ced2b67d1c2a61c19c6751aa8902d46ce3dacb23600a283619f5a12d/charset_normalizer-3.4.2.tar.gz", hash = "sha256:5baececa9ecba31eff645232d59845c07aa030f0c81ee70184a90d35099a0e63", size = 126367, upload-time = "2025-05-02T08:34:42.01Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/ea/12/a93df3366ed32db1d907d7593a94f1fe6293903e3e92967bebd6950ed12c/charset_normalizer-3.4.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:926ca93accd5d36ccdabd803392ddc3e03e6d4cd1cf17deff3b989ab8e9dbcf0", size = 199622, upload-time = "2025-05-02T08:32:56.363Z" },
{ url = "https://files.pythonhosted.org/packages/04/93/bf204e6f344c39d9937d3c13c8cd5bbfc266472e51fc8c07cb7f64fcd2de/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eba9904b0f38a143592d9fc0e19e2df0fa2e41c3c3745554761c5f6447eedabf", size = 143435, upload-time = "2025-05-02T08:32:58.551Z" },
{ url = "https://files.pythonhosted.org/packages/22/2a/ea8a2095b0bafa6c5b5a55ffdc2f924455233ee7b91c69b7edfcc9e02284/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3fddb7e2c84ac87ac3a947cb4e66d143ca5863ef48e4a5ecb83bd48619e4634e", size = 153653, upload-time = "2025-05-02T08:33:00.342Z" },
{ url = "https://files.pythonhosted.org/packages/b6/57/1b090ff183d13cef485dfbe272e2fe57622a76694061353c59da52c9a659/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98f862da73774290f251b9df8d11161b6cf25b599a66baf087c1ffe340e9bfd1", size = 146231, upload-time = "2025-05-02T08:33:02.081Z" },
{ url = "https://files.pythonhosted.org/packages/e2/28/ffc026b26f441fc67bd21ab7f03b313ab3fe46714a14b516f931abe1a2d8/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c9379d65defcab82d07b2a9dfbfc2e95bc8fe0ebb1b176a3190230a3ef0e07c", size = 148243, upload-time = "2025-05-02T08:33:04.063Z" },
{ url = "https://files.pythonhosted.org/packages/c0/0f/9abe9bd191629c33e69e47c6ef45ef99773320e9ad8e9cb08b8ab4a8d4cb/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e635b87f01ebc977342e2697d05b56632f5f879a4f15955dfe8cef2448b51691", size = 150442, upload-time = "2025-05-02T08:33:06.418Z" },
{ url = "https://files.pythonhosted.org/packages/67/7c/a123bbcedca91d5916c056407f89a7f5e8fdfce12ba825d7d6b9954a1a3c/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1c95a1e2902a8b722868587c0e1184ad5c55631de5afc0eb96bc4b0d738092c0", size = 145147, upload-time = "2025-05-02T08:33:08.183Z" },
{ url = "https://files.pythonhosted.org/packages/ec/fe/1ac556fa4899d967b83e9893788e86b6af4d83e4726511eaaad035e36595/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ef8de666d6179b009dce7bcb2ad4c4a779f113f12caf8dc77f0162c29d20490b", size = 153057, upload-time = "2025-05-02T08:33:09.986Z" },
{ url = "https://files.pythonhosted.org/packages/2b/ff/acfc0b0a70b19e3e54febdd5301a98b72fa07635e56f24f60502e954c461/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:32fc0341d72e0f73f80acb0a2c94216bd704f4f0bce10aedea38f30502b271ff", size = 156454, upload-time = "2025-05-02T08:33:11.814Z" },
{ url = "https://files.pythonhosted.org/packages/92/08/95b458ce9c740d0645feb0e96cea1f5ec946ea9c580a94adfe0b617f3573/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:289200a18fa698949d2b39c671c2cc7a24d44096784e76614899a7ccf2574b7b", size = 154174, upload-time = "2025-05-02T08:33:13.707Z" },
{ url = "https://files.pythonhosted.org/packages/78/be/8392efc43487ac051eee6c36d5fbd63032d78f7728cb37aebcc98191f1ff/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4a476b06fbcf359ad25d34a057b7219281286ae2477cc5ff5e3f70a246971148", size = 149166, upload-time = "2025-05-02T08:33:15.458Z" },
{ url = "https://files.pythonhosted.org/packages/44/96/392abd49b094d30b91d9fbda6a69519e95802250b777841cf3bda8fe136c/charset_normalizer-3.4.2-cp313-cp313-win32.whl", hash = "sha256:aaeeb6a479c7667fbe1099af9617c83aaca22182d6cf8c53966491a0f1b7ffb7", size = 98064, upload-time = "2025-05-02T08:33:17.06Z" },
{ url = "https://files.pythonhosted.org/packages/e9/b0/0200da600134e001d91851ddc797809e2fe0ea72de90e09bec5a2fbdaccb/charset_normalizer-3.4.2-cp313-cp313-win_amd64.whl", hash = "sha256:aa6af9e7d59f9c12b33ae4e9450619cf2488e2bbe9b44030905877f0b2324980", size = 105641, upload-time = "2025-05-02T08:33:18.753Z" },
{ url = "https://files.pythonhosted.org/packages/20/94/c5790835a017658cbfabd07f3bfb549140c3ac458cfc196323996b10095a/charset_normalizer-3.4.2-py3-none-any.whl", hash = "sha256:7f56930ab0abd1c45cd15be65cc741c28b1c9a34876ce8c17a2fa107810c0af0", size = 52626, upload-time = "2025-05-02T08:34:40.053Z" },
]
[[package]]
name = "click"
version = "8.2.0"
@ -67,6 +160,43 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/a2/58/1f37bf81e3c689cc74ffa42102fa8915b59085f54a6e4a80bc6265c0f6bf/click-8.2.0-py3-none-any.whl", hash = "sha256:6b303f0b2aa85f1cb4e5303078fadcbcd4e476f114fab9b5007005711839325c", size = 102156, upload-time = "2025-05-10T22:21:01.352Z" },
]
[[package]]
name = "click-didyoumean"
version = "0.3.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "click" },
]
sdist = { url = "https://files.pythonhosted.org/packages/30/ce/217289b77c590ea1e7c24242d9ddd6e249e52c795ff10fac2c50062c48cb/click_didyoumean-0.3.1.tar.gz", hash = "sha256:4f82fdff0dbe64ef8ab2279bd6aa3f6a99c3b28c05aa09cbfc07c9d7fbb5a463", size = 3089, upload-time = "2024-03-24T08:22:07.499Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/1b/5b/974430b5ffdb7a4f1941d13d83c64a0395114503cc357c6b9ae4ce5047ed/click_didyoumean-0.3.1-py3-none-any.whl", hash = "sha256:5c4bb6007cfea5f2fd6583a2fb6701a22a41eb98957e63d0fac41c10e7c3117c", size = 3631, upload-time = "2024-03-24T08:22:06.356Z" },
]
[[package]]
name = "click-plugins"
version = "1.1.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "click" },
]
sdist = { url = "https://files.pythonhosted.org/packages/5f/1d/45434f64ed749540af821fd7e42b8e4d23ac04b1eda7c26613288d6cd8a8/click-plugins-1.1.1.tar.gz", hash = "sha256:46ab999744a9d831159c3411bb0c79346d94a444df9a3a3742e9ed63645f264b", size = 8164, upload-time = "2019-04-04T04:27:04.82Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/e9/da/824b92d9942f4e472702488857914bdd50f73021efea15b4cad9aca8ecef/click_plugins-1.1.1-py2.py3-none-any.whl", hash = "sha256:5d262006d3222f5057fd81e1623d4443e41dcda5dc815c06b442aa3c02889fc8", size = 7497, upload-time = "2019-04-04T04:27:03.36Z" },
]
[[package]]
name = "click-repl"
version = "0.3.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "click" },
{ name = "prompt-toolkit" },
]
sdist = { url = "https://files.pythonhosted.org/packages/cb/a2/57f4ac79838cfae6912f997b4d1a64a858fb0c86d7fcaae6f7b58d267fca/click-repl-0.3.0.tar.gz", hash = "sha256:17849c23dba3d667247dc4defe1757fff98694e90fe37474f3feebb69ced26a9", size = 10449, upload-time = "2023-06-15T12:43:51.141Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/52/40/9d857001228658f0d59e97ebd4c346fe73e138c6de1bce61dc568a57c7f8/click_repl-0.3.0-py3-none-any.whl", hash = "sha256:fb7e06deb8da8de86180a33a9da97ac316751c094c6899382da7feeeeb51b812", size = 10289, upload-time = "2023-06-15T12:43:48.626Z" },
]
[[package]]
name = "colorama"
version = "0.4.6"
@ -118,6 +248,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/b8/9a/4f1166cc82c9f777cf9a5bc2a75171d63301ac317c5de8f59bd44bfe2b7a/crispy_bootstrap5-2025.4-py3-none-any.whl", hash = "sha256:51efa19c7d40e339774a6fe23407e83b95b7634cad6de70fd1f1093131bea1d9", size = 24772, upload-time = "2025-04-02T12:33:14.904Z" },
]
[[package]]
name = "cron-descriptor"
version = "1.4.5"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/02/83/70bd410dc6965e33a5460b7da84cf0c5a7330a68d6d5d4c3dfdb72ca117e/cron_descriptor-1.4.5.tar.gz", hash = "sha256:f51ce4ffc1d1f2816939add8524f206c376a42c87a5fca3091ce26725b3b1bca", size = 30666, upload-time = "2024-08-24T18:16:48.654Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/88/20/2cfe598ead23a715a00beb716477cfddd3e5948cf203c372d02221e5b0c6/cron_descriptor-1.4.5-py3-none-any.whl", hash = "sha256:736b3ae9d1a99bc3dbfc5b55b5e6e7c12031e7ba5de716625772f8b02dcd6013", size = 50370, upload-time = "2024-08-24T18:16:46.783Z" },
]
[[package]]
name = "distlib"
version = "0.3.9"
@ -151,6 +290,23 @@ dependencies = [
]
sdist = { url = "https://files.pythonhosted.org/packages/09/50/4fa3a907be1a49f5ad3b7cd67944d4b91186ef6743fe0fd401c160ba6341/django_allauth-65.8.0.tar.gz", hash = "sha256:9da589d99d412740629333a01865a90c95c97e0fae0cde789aa45a8fda90e83b", size = 1679978, upload-time = "2025-05-08T19:31:27.975Z" }
[[package]]
name = "django-celery-beat"
version = "2.8.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "celery" },
{ name = "cron-descriptor" },
{ name = "django" },
{ name = "django-timezone-field" },
{ name = "python-crontab" },
{ name = "tzdata" },
]
sdist = { url = "https://files.pythonhosted.org/packages/aa/11/0c8b412869b4fda72828572068312b10aafe7ccef7b41af3633af31f9d4b/django_celery_beat-2.8.1.tar.gz", hash = "sha256:dfad0201c0ac50c91a34700ef8fa0a10ee098cc7f3375fe5debed79f2204f80a", size = 175802, upload-time = "2025-05-13T06:58:29.246Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/61/e5/3a0167044773dee989b498e9a851fc1663bea9ab879f1179f7b8a827ac10/django_celery_beat-2.8.1-py3-none-any.whl", hash = "sha256:da2b1c6939495c05a551717509d6e3b79444e114a027f7b77bf3727c2a39d171", size = 104833, upload-time = "2025-05-13T06:58:27.309Z" },
]
[[package]]
name = "django-crispy-forms"
version = "2.4"
@ -205,6 +361,18 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/e2/65/9f5ca467d84a67c0c547f10b0ece9fd9c26c5efc818a01bf6a3d306c2a0c/django_stubs_ext-5.2.0-py3-none-any.whl", hash = "sha256:b27ae0aab970af4894ba4e9b3fcd3e03421dc8731516669659ee56122d148b23", size = 9066, upload-time = "2025-04-26T10:48:36.032Z" },
]
[[package]]
name = "django-timezone-field"
version = "7.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "django" },
]
sdist = { url = "https://files.pythonhosted.org/packages/ba/5b/0dbe271fef3c2274b83dbcb1b19fa3dacf1f7e542382819294644e78ea8b/django_timezone_field-7.1.tar.gz", hash = "sha256:b3ef409d88a2718b566fabe10ea996f2838bc72b22d3a2900c0aa905c761380c", size = 13727, upload-time = "2025-01-11T17:49:54.486Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/ec/09/7a808392a751a24ffa62bec00e3085a9c1a151d728c323a5bab229ea0e58/django_timezone_field-7.1-py3-none-any.whl", hash = "sha256:93914713ed882f5bccda080eda388f7006349f25930b6122e9b07bf8db49c4b4", size = 13177, upload-time = "2025-01-11T17:49:52.142Z" },
]
[[package]]
name = "filelock"
version = "3.18.0"
@ -214,6 +382,31 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/4d/36/2a115987e2d8c300a974597416d9de88f2444426de9571f4b59b2cca3acc/filelock-3.18.0-py3-none-any.whl", hash = "sha256:c401f4f8377c4464e6db25fff06205fd89bdd83b65eb0488ed1b160f780e21de", size = 16215, upload-time = "2025-03-14T07:11:39.145Z" },
]
[[package]]
name = "greenlet"
version = "3.2.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/34/c1/a82edae11d46c0d83481aacaa1e578fea21d94a1ef400afd734d47ad95ad/greenlet-3.2.2.tar.gz", hash = "sha256:ad053d34421a2debba45aa3cc39acf454acbcd025b3fc1a9f8a0dee237abd485", size = 185797, upload-time = "2025-05-09T19:47:35.066Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/89/30/97b49779fff8601af20972a62cc4af0c497c1504dfbb3e93be218e093f21/greenlet-3.2.2-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:3ab7194ee290302ca15449f601036007873028712e92ca15fc76597a0aeb4c59", size = 269150, upload-time = "2025-05-09T14:50:30.784Z" },
{ url = "https://files.pythonhosted.org/packages/21/30/877245def4220f684bc2e01df1c2e782c164e84b32e07373992f14a2d107/greenlet-3.2.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2dc5c43bb65ec3669452af0ab10729e8fdc17f87a1f2ad7ec65d4aaaefabf6bf", size = 637381, upload-time = "2025-05-09T15:24:12.893Z" },
{ url = "https://files.pythonhosted.org/packages/8e/16/adf937908e1f913856b5371c1d8bdaef5f58f251d714085abeea73ecc471/greenlet-3.2.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:decb0658ec19e5c1f519faa9a160c0fc85a41a7e6654b3ce1b44b939f8bf1325", size = 651427, upload-time = "2025-05-09T15:24:51.074Z" },
{ url = "https://files.pythonhosted.org/packages/ad/49/6d79f58fa695b618654adac64e56aff2eeb13344dc28259af8f505662bb1/greenlet-3.2.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6fadd183186db360b61cb34e81117a096bff91c072929cd1b529eb20dd46e6c5", size = 645795, upload-time = "2025-05-09T15:29:26.673Z" },
{ url = "https://files.pythonhosted.org/packages/5a/e6/28ed5cb929c6b2f001e96b1d0698c622976cd8f1e41fe7ebc047fa7c6dd4/greenlet-3.2.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1919cbdc1c53ef739c94cf2985056bcc0838c1f217b57647cbf4578576c63825", size = 648398, upload-time = "2025-05-09T14:53:36.61Z" },
{ url = "https://files.pythonhosted.org/packages/9d/70/b200194e25ae86bc57077f695b6cc47ee3118becf54130c5514456cf8dac/greenlet-3.2.2-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3885f85b61798f4192d544aac7b25a04ece5fe2704670b4ab73c2d2c14ab740d", size = 606795, upload-time = "2025-05-09T14:53:47.039Z" },
{ url = "https://files.pythonhosted.org/packages/f8/c8/ba1def67513a941154ed8f9477ae6e5a03f645be6b507d3930f72ed508d3/greenlet-3.2.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:85f3e248507125bf4af607a26fd6cb8578776197bd4b66e35229cdf5acf1dfbf", size = 1117976, upload-time = "2025-05-09T15:27:06.542Z" },
{ url = "https://files.pythonhosted.org/packages/c3/30/d0e88c1cfcc1b3331d63c2b54a0a3a4a950ef202fb8b92e772ca714a9221/greenlet-3.2.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:1e76106b6fc55fa3d6fe1c527f95ee65e324a13b62e243f77b48317346559708", size = 1145509, upload-time = "2025-05-09T14:54:02.223Z" },
{ url = "https://files.pythonhosted.org/packages/90/2e/59d6491834b6e289051b252cf4776d16da51c7c6ca6a87ff97e3a50aa0cd/greenlet-3.2.2-cp313-cp313-win_amd64.whl", hash = "sha256:fe46d4f8e94e637634d54477b0cfabcf93c53f29eedcbdeecaf2af32029b4421", size = 296023, upload-time = "2025-05-09T14:53:24.157Z" },
{ url = "https://files.pythonhosted.org/packages/65/66/8a73aace5a5335a1cba56d0da71b7bd93e450f17d372c5b7c5fa547557e9/greenlet-3.2.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba30e88607fb6990544d84caf3c706c4b48f629e18853fc6a646f82db9629418", size = 629911, upload-time = "2025-05-09T15:24:22.376Z" },
{ url = "https://files.pythonhosted.org/packages/48/08/c8b8ebac4e0c95dcc68ec99198842e7db53eda4ab3fb0a4e785690883991/greenlet-3.2.2-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:055916fafad3e3388d27dd68517478933a97edc2fc54ae79d3bec827de2c64c4", size = 635251, upload-time = "2025-05-09T15:24:52.205Z" },
{ url = "https://files.pythonhosted.org/packages/37/26/7db30868f73e86b9125264d2959acabea132b444b88185ba5c462cb8e571/greenlet-3.2.2-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2593283bf81ca37d27d110956b79e8723f9aa50c4bcdc29d3c0543d4743d2763", size = 632620, upload-time = "2025-05-09T15:29:28.051Z" },
{ url = "https://files.pythonhosted.org/packages/10/ec/718a3bd56249e729016b0b69bee4adea0dfccf6ca43d147ef3b21edbca16/greenlet-3.2.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89c69e9a10670eb7a66b8cef6354c24671ba241f46152dd3eed447f79c29fb5b", size = 628851, upload-time = "2025-05-09T14:53:38.472Z" },
{ url = "https://files.pythonhosted.org/packages/9b/9d/d1c79286a76bc62ccdc1387291464af16a4204ea717f24e77b0acd623b99/greenlet-3.2.2-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:02a98600899ca1ca5d3a2590974c9e3ec259503b2d6ba6527605fcd74e08e207", size = 593718, upload-time = "2025-05-09T14:53:48.313Z" },
{ url = "https://files.pythonhosted.org/packages/cd/41/96ba2bf948f67b245784cd294b84e3d17933597dffd3acdb367a210d1949/greenlet-3.2.2-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:b50a8c5c162469c3209e5ec92ee4f95c8231b11db6a04db09bbe338176723bb8", size = 1105752, upload-time = "2025-05-09T15:27:08.217Z" },
{ url = "https://files.pythonhosted.org/packages/68/3b/3b97f9d33c1f2eb081759da62bd6162159db260f602f048bc2f36b4c453e/greenlet-3.2.2-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:45f9f4853fb4cc46783085261c9ec4706628f3b57de3e68bae03e8f8b3c0de51", size = 1125170, upload-time = "2025-05-09T14:54:04.082Z" },
{ url = "https://files.pythonhosted.org/packages/31/df/b7d17d66c8d0f578d2885a3d8f565e9e4725eacc9d3fdc946d0031c055c4/greenlet-3.2.2-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:9ea5231428af34226c05f927e16fc7f6fa5e39e3ad3cd24ffa48ba53a47f4240", size = 269899, upload-time = "2025-05-09T14:54:01.581Z" },
]
[[package]]
name = "gunicorn"
version = "23.0.0"
@ -235,6 +428,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/2b/d3/85feeba1d097b81a44bcffa6a0beab7b4dfffe78e82fc54978d3ac380736/identify-2.6.10-py2.py3-none-any.whl", hash = "sha256:5f34248f54136beed1a7ba6a6b5c4b6cf21ff495aac7c359e1ef831ae3b8ab25", size = 99101, upload-time = "2025-04-19T15:10:36.701Z" },
]
[[package]]
name = "idna"
version = "3.10"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490, upload-time = "2024-09-15T18:07:39.745Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" },
]
[[package]]
name = "iniconfig"
version = "2.1.0"
@ -244,20 +446,41 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050, upload-time = "2025-03-19T20:10:01.071Z" },
]
[[package]]
name = "kombu"
version = "5.5.3"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "amqp" },
{ name = "tzdata" },
{ name = "vine" },
]
sdist = { url = "https://files.pythonhosted.org/packages/60/0a/128b65651ed8120460fc5af754241ad595eac74993115ec0de4f2d7bc459/kombu-5.5.3.tar.gz", hash = "sha256:021a0e11fcfcd9b0260ef1fb64088c0e92beb976eb59c1dfca7ddd4ad4562ea2", size = 461784, upload-time = "2025-04-16T12:46:17.014Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/5d/35/1407fb0b2f5b07b50cbaf97fce09ad87d3bfefbf64f7171a8651cd8d2f68/kombu-5.5.3-py3-none-any.whl", hash = "sha256:5b0dbceb4edee50aa464f59469d34b97864be09111338cfb224a10b6a163909b", size = 209921, upload-time = "2025-04-16T12:46:15.139Z" },
]
[[package]]
name = "livegraphsdjango"
version = "0.1.0"
source = { editable = "." }
dependencies = [
{ name = "bleach", extra = ["css"] },
{ name = "celery", extra = ["sqlalchemy"] },
{ name = "crispy-bootstrap5" },
{ name = "django" },
{ name = "django-allauth" },
{ name = "django-celery-beat" },
{ name = "django-crispy-forms" },
{ name = "gunicorn" },
{ name = "numpy" },
{ name = "pandas" },
{ name = "plotly" },
{ name = "python-dotenv" },
{ name = "redis" },
{ name = "requests" },
{ name = "sqlalchemy" },
{ name = "tinycss2" },
{ name = "whitenoise" },
]
@ -277,15 +500,22 @@ dev = [
[package.metadata]
requires-dist = [
{ name = "bleach", extras = ["css"], specifier = ">=6.2.0" },
{ name = "celery", extras = ["sqlalchemy"], specifier = ">=5.5.2" },
{ name = "crispy-bootstrap5", specifier = ">=2025.4" },
{ name = "django", specifier = ">=5.2.1" },
{ name = "django-allauth", specifier = ">=65.8.0" },
{ name = "django-celery-beat", specifier = ">=2.8.1" },
{ name = "django-crispy-forms", specifier = ">=2.4" },
{ name = "gunicorn", specifier = ">=23.0.0" },
{ name = "numpy", specifier = ">=2.2.5" },
{ name = "pandas", specifier = ">=2.2.3" },
{ name = "plotly", specifier = ">=6.1.0" },
{ name = "python-dotenv", specifier = ">=1.1.0" },
{ name = "redis", specifier = ">=6.1.0" },
{ name = "requests", specifier = ">=2.32.3" },
{ name = "sqlalchemy", specifier = ">=2.0.41" },
{ name = "tinycss2", specifier = ">=1.4.0" },
{ name = "whitenoise", specifier = ">=6.9.0" },
]
@ -502,6 +732,18 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/88/74/a88bf1b1efeae488a0c0b7bdf71429c313722d1fc0f377537fbe554e6180/pre_commit-4.2.0-py2.py3-none-any.whl", hash = "sha256:a009ca7205f1eb497d10b845e52c838a98b6cdd2102a6c8e4540e94ee75c58bd", size = 220707, upload-time = "2025-03-18T21:35:19.343Z" },
]
[[package]]
name = "prompt-toolkit"
version = "3.0.51"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "wcwidth" },
]
sdist = { url = "https://files.pythonhosted.org/packages/bb/6e/9d084c929dfe9e3bfe0c6a47e31f78a25c54627d64a66e884a8bf5474f1c/prompt_toolkit-3.0.51.tar.gz", hash = "sha256:931a162e3b27fc90c86f1b48bb1fb2c528c2761475e57c9c06de13311c7b54ed", size = 428940, upload-time = "2025-04-15T09:18:47.731Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/ce/4f/5249960887b1fbe561d9ff265496d170b55a735b76724f10ef19f9e40716/prompt_toolkit-3.0.51-py3-none-any.whl", hash = "sha256:52742911fde84e2d423e2f9a4cf1de7d7ac4e51958f648d9540e0fb8db077b07", size = 387810, upload-time = "2025-04-15T09:18:44.753Z" },
]
[[package]]
name = "pygments"
version = "2.19.1"
@ -538,6 +780,18 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/be/ac/bd0608d229ec808e51a21044f3f2f27b9a37e7a0ebaca7247882e67876af/pytest_django-4.11.1-py3-none-any.whl", hash = "sha256:1b63773f648aa3d8541000c26929c1ea63934be1cfa674c76436966d73fe6a10", size = 25281, upload-time = "2025-04-03T18:56:07.678Z" },
]
[[package]]
name = "python-crontab"
version = "3.2.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "python-dateutil" },
]
sdist = { url = "https://files.pythonhosted.org/packages/e2/f0/25775565c133d4e29eeb607bf9ddba0075f3af36041a1844dd207881047f/python_crontab-3.2.0.tar.gz", hash = "sha256:40067d1dd39ade3460b2ad8557c7651514cd3851deffff61c5c60e1227c5c36b", size = 57001, upload-time = "2024-07-01T22:29:10.903Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/3b/91/832fb3b3a1f62bd2ab4924f6be0c7736c9bc4f84d3b153b74efcf6d4e4a1/python_crontab-3.2.0-py3-none-any.whl", hash = "sha256:82cb9b6a312d41ff66fd3caf3eed7115c28c195bfb50711bc2b4b9592feb9fe5", size = 27351, upload-time = "2024-07-01T22:29:08.549Z" },
]
[[package]]
name = "python-dateutil"
version = "2.9.0.post0"
@ -585,6 +839,30 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/fa/de/02b54f42487e3d3c6efb3f89428677074ca7bf43aae402517bc7cca949f3/PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563", size = 156446, upload-time = "2024-08-06T20:33:04.33Z" },
]
[[package]]
name = "redis"
version = "6.1.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/a6/af/e875d57383653e5d9065df8552de1deb7576b4d3cf3af90cde2e79ff7f65/redis-6.1.0.tar.gz", hash = "sha256:c928e267ad69d3069af28a9823a07726edf72c7e37764f43dc0123f37928c075", size = 4629300, upload-time = "2025-05-13T12:16:57.538Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/28/5f/cf36360f80ae233bd1836442f5127818cfcfc7b1846179b60b2e9a4c45c9/redis-6.1.0-py3-none-any.whl", hash = "sha256:3b72622f3d3a89df2a6041e82acd896b0e67d9f54e9bcd906d091d23ba5219f6", size = 273750, upload-time = "2025-05-13T12:16:55.661Z" },
]
[[package]]
name = "requests"
version = "2.32.3"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "certifi" },
{ name = "charset-normalizer" },
{ name = "idna" },
{ name = "urllib3" },
]
sdist = { url = "https://files.pythonhosted.org/packages/63/70/2bf7780ad2d390a8d301ad0b550f1581eadbd9a20f896afe06353c2a2913/requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760", size = 131218, upload-time = "2024-05-29T15:37:49.536Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/f9/9b/335f9764261e915ed497fcdeb11df5dfd6f7bf257d4a6a2a686d80da4d54/requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6", size = 64928, upload-time = "2024-05-29T15:37:47.027Z" },
]

[[package]]
name = "rich"
version = "14.0.0"
@@ -641,6 +919,27 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" },
]

[[package]]
name = "sqlalchemy"
version = "2.0.41"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "greenlet", marker = "(python_full_version < '3.14' and platform_machine == 'AMD64') or (python_full_version < '3.14' and platform_machine == 'WIN32') or (python_full_version < '3.14' and platform_machine == 'aarch64') or (python_full_version < '3.14' and platform_machine == 'amd64') or (python_full_version < '3.14' and platform_machine == 'ppc64le') or (python_full_version < '3.14' and platform_machine == 'win32') or (python_full_version < '3.14' and platform_machine == 'x86_64')" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/63/66/45b165c595ec89aa7dcc2c1cd222ab269bc753f1fc7a1e68f8481bd957bf/sqlalchemy-2.0.41.tar.gz", hash = "sha256:edba70118c4be3c2b1f90754d308d0b79c6fe2c0fdc52d8ddf603916f83f4db9", size = 9689424, upload-time = "2025-05-14T17:10:32.339Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/d3/ad/2e1c6d4f235a97eeef52d0200d8ddda16f6c4dd70ae5ad88c46963440480/sqlalchemy-2.0.41-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4eeb195cdedaf17aab6b247894ff2734dcead6c08f748e617bfe05bd5a218443", size = 2115491, upload-time = "2025-05-14T17:55:31.177Z" },
{ url = "https://files.pythonhosted.org/packages/cf/8d/be490e5db8400dacc89056f78a52d44b04fbf75e8439569d5b879623a53b/sqlalchemy-2.0.41-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d4ae769b9c1c7757e4ccce94b0641bc203bbdf43ba7a2413ab2523d8d047d8dc", size = 2102827, upload-time = "2025-05-14T17:55:34.921Z" },
{ url = "https://files.pythonhosted.org/packages/a0/72/c97ad430f0b0e78efaf2791342e13ffeafcbb3c06242f01a3bb8fe44f65d/sqlalchemy-2.0.41-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a62448526dd9ed3e3beedc93df9bb6b55a436ed1474db31a2af13b313a70a7e1", size = 3225224, upload-time = "2025-05-14T17:50:41.418Z" },
{ url = "https://files.pythonhosted.org/packages/5e/51/5ba9ea3246ea068630acf35a6ba0d181e99f1af1afd17e159eac7e8bc2b8/sqlalchemy-2.0.41-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc56c9788617b8964ad02e8fcfeed4001c1f8ba91a9e1f31483c0dffb207002a", size = 3230045, upload-time = "2025-05-14T17:51:54.722Z" },
{ url = "https://files.pythonhosted.org/packages/78/2f/8c14443b2acea700c62f9b4a8bad9e49fc1b65cfb260edead71fd38e9f19/sqlalchemy-2.0.41-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c153265408d18de4cc5ded1941dcd8315894572cddd3c58df5d5b5705b3fa28d", size = 3159357, upload-time = "2025-05-14T17:50:43.483Z" },
{ url = "https://files.pythonhosted.org/packages/fc/b2/43eacbf6ccc5276d76cea18cb7c3d73e294d6fb21f9ff8b4eef9b42bbfd5/sqlalchemy-2.0.41-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4f67766965996e63bb46cfbf2ce5355fc32d9dd3b8ad7e536a920ff9ee422e23", size = 3197511, upload-time = "2025-05-14T17:51:57.308Z" },
{ url = "https://files.pythonhosted.org/packages/fa/2e/677c17c5d6a004c3c45334ab1dbe7b7deb834430b282b8a0f75ae220c8eb/sqlalchemy-2.0.41-cp313-cp313-win32.whl", hash = "sha256:bfc9064f6658a3d1cadeaa0ba07570b83ce6801a1314985bf98ec9b95d74e15f", size = 2082420, upload-time = "2025-05-14T17:55:52.69Z" },
{ url = "https://files.pythonhosted.org/packages/e9/61/e8c1b9b6307c57157d328dd8b8348ddc4c47ffdf1279365a13b2b98b8049/sqlalchemy-2.0.41-cp313-cp313-win_amd64.whl", hash = "sha256:82ca366a844eb551daff9d2e6e7a9e5e76d2612c8564f58db6c19a726869c1df", size = 2108329, upload-time = "2025-05-14T17:55:54.495Z" },
{ url = "https://files.pythonhosted.org/packages/1c/fc/9ba22f01b5cdacc8f5ed0d22304718d2c758fce3fd49a5372b886a86f37c/sqlalchemy-2.0.41-py3-none-any.whl", hash = "sha256:57df5dc6fdb5ed1a88a1ed2195fd31927e705cad62dedd86b46972752a80f576", size = 1911224, upload-time = "2025-05-14T17:39:42.154Z" },
]

[[package]]
name = "sqlparse"
version = "0.5.3"
@@ -662,6 +961,18 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/f7/45/8c4ebc0c460e6ec38e62ab245ad3c7fc10b210116cea7c16d61602aa9558/stevedore-5.4.1-py3-none-any.whl", hash = "sha256:d10a31c7b86cba16c1f6e8d15416955fc797052351a56af15e608ad20811fcfe", size = 49533, upload-time = "2025-02-20T14:03:55.849Z" },
]

[[package]]
name = "tinycss2"
version = "1.4.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "webencodings" },
]
sdist = { url = "https://files.pythonhosted.org/packages/7a/fd/7a5ee21fd08ff70d3d33a5781c255cbe779659bd03278feb98b19ee550f4/tinycss2-1.4.0.tar.gz", hash = "sha256:10c0972f6fc0fbee87c3edb76549357415e94548c1ae10ebccdea16fb404a9b7", size = 87085, upload-time = "2024-10-24T14:58:29.895Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/e6/34/ebdc18bae6aa14fbee1a08b63c015c72b64868ff7dae68808ab500c492e2/tinycss2-1.4.0-py3-none-any.whl", hash = "sha256:3a49cf47b7675da0b15d0c6e1df8df4ebd96e9394bb905a5775adb0d884c5289", size = 26610, upload-time = "2024-10-24T14:58:28.029Z" },
]

[[package]]
name = "types-pyyaml"
version = "6.0.12.20250516"
@@ -689,6 +1000,24 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/5c/23/c7abc0ca0a1526a0774eca151daeb8de62ec457e77262b66b359c3c7679e/tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8", size = 347839, upload-time = "2025-03-23T13:54:41.845Z" },
]

[[package]]
name = "urllib3"
version = "2.4.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/8a/78/16493d9c386d8e60e442a35feac5e00f0913c0f4b7c217c11e8ec2ff53e0/urllib3-2.4.0.tar.gz", hash = "sha256:414bc6535b787febd7567804cc015fee39daab8ad86268f1310a9250697de466", size = 390672, upload-time = "2025-04-10T15:23:39.232Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/6b/11/cc635220681e93a0183390e26485430ca2c7b5f9d33b15c74c2861cb8091/urllib3-2.4.0-py3-none-any.whl", hash = "sha256:4e16665048960a0900c702d4a66415956a584919c03361cac9f1df5c5dd7e813", size = 128680, upload-time = "2025-04-10T15:23:37.377Z" },
]

[[package]]
name = "vine"
version = "5.1.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/bd/e4/d07b5f29d283596b9727dd5275ccbceb63c44a1a82aa9e4bfd20426762ac/vine-5.1.0.tar.gz", hash = "sha256:8b62e981d35c41049211cf62a0a1242d8c1ee9bd15bb196ce38aefd6799e61e0", size = 48980, upload-time = "2023-11-05T08:46:53.857Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/03/ff/7c0c86c43b3cbb927e0ccc0255cb4057ceba4799cd44ae95174ce8e8b5b2/vine-5.1.0-py3-none-any.whl", hash = "sha256:40fdf3c48b2cfe1c38a49e9ae2da6fda88e4794c810050a728bd7413811fb1dc", size = 9636, upload-time = "2023-11-05T08:46:51.205Z" },
]

[[package]]
name = "virtualenv"
version = "20.31.2"
@@ -703,6 +1032,24 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/f3/40/b1c265d4b2b62b58576588510fc4d1fe60a86319c8de99fd8e9fec617d2c/virtualenv-20.31.2-py3-none-any.whl", hash = "sha256:36efd0d9650ee985f0cad72065001e66d49a6f24eb44d98980f630686243cf11", size = 6057982, upload-time = "2025-05-08T17:58:21.15Z" },
]

[[package]]
name = "wcwidth"
version = "0.2.13"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/6c/63/53559446a878410fc5a5974feb13d31d78d752eb18aeba59c7fef1af7598/wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5", size = 101301, upload-time = "2024-01-06T02:10:57.829Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/fd/84/fd2ba7aafacbad3c4201d395674fc6348826569da3c0937e75505ead3528/wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859", size = 34166, upload-time = "2024-01-06T02:10:55.763Z" },
]

[[package]]
name = "webencodings"
version = "0.5.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/0b/02/ae6ceac1baeda530866a85075641cec12989bd8d31af6d5ab4a3e8c92f47/webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923", size = 9721, upload-time = "2017-04-05T20:21:34.189Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/f4/24/2a3e3df732393fed8b3ebf2ec078f05546de641fe1b667ee316ec1dcf3b7/webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78", size = 11774, upload-time = "2017-04-05T20:21:32.581Z" },
]

[[package]]
name = "whitenoise"
version = "6.9.0"