From b01260e765f2e87e8ea0e8745e73529854c95341 Mon Sep 17 00:00:00 2001 From: Kaj Kowalski Date: Sun, 25 May 2025 15:23:48 +0200 Subject: [PATCH] Add comprehensive unit tests for services and main package - Implement tests for the app service, including course processing from file and URI. - Create mock implementations for CourseParser and Exporter to facilitate testing. - Add tests for HTML cleaner service to validate HTML content cleaning functionality. - Develop tests for the parser service, covering course fetching and loading from files. - Introduce tests for utility functions in the main package, ensuring URI validation and string joining. - Include benchmarks for performance evaluation of key functions. --- .github/workflows/ci.yml | 227 ++++++- .gitignore | 26 + README.md | 92 ++- go.mod | 9 +- go.sum | 12 +- internal/exporters/docx.go | 71 +-- internal/exporters/docx_test.go | 679 +++++++++++++++++++++ internal/exporters/factory_test.go | 444 ++++++++++++++ internal/exporters/markdown.go | 272 +++++---- internal/exporters/markdown_test.go | 693 ++++++++++++++++++++++ internal/models/models_test.go | 790 +++++++++++++++++++++++++ internal/services/app_test.go | 353 +++++++++++ internal/services/html_cleaner_test.go | 325 ++++++++++ internal/services/parser.go | 12 + internal/services/parser_test.go | 440 ++++++++++++++ internal/version/version.go | 2 +- main_test.go | 175 ++++++ 17 files changed, 4431 insertions(+), 191 deletions(-) create mode 100644 internal/exporters/docx_test.go create mode 100644 internal/exporters/factory_test.go create mode 100644 internal/exporters/markdown_test.go create mode 100644 internal/models/models_test.go create mode 100644 internal/services/app_test.go create mode 100644 internal/services/html_cleaner_test.go create mode 100644 internal/services/parser_test.go create mode 100644 main_test.go diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index fd5ed68..3f23bda 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -2,7 +2,7 @@ name: CI on: push: - branches: [ "master", "develop" ] + branches: [ "master", "develop" ] tags: - "v*.*.*" pull_request: @@ -16,7 +16,11 @@ jobs: contents: write strategy: matrix: - go: [1.21.x, 1.22.x, 1.23.x, 1.24.x] + go: + - 1.21.x + - 1.22.x + - 1.23.x + - 1.24.x steps: - uses: actions/checkout@v4 @@ -37,20 +41,171 @@ jobs: - name: Build run: go build -v ./... - - name: Run tests - run: go test -v -race -coverprofile=coverage.out ./... + - name: Run tests with enhanced reporting + id: test + run: | + echo "## 🔧 Test Environment" >> $GITHUB_STEP_SUMMARY + echo "- **Go Version:** ${{ matrix.go }}" >> $GITHUB_STEP_SUMMARY + echo "- **OS:** ubuntu-latest" >> $GITHUB_STEP_SUMMARY + echo "- **Timestamp:** $(date -u)" >> $GITHUB_STEP_SUMMARY + echo "" >> $GITHUB_STEP_SUMMARY + + echo "Running tests with coverage..." + go test -v -race -coverprofile=coverage.out ./... 2>&1 | tee test-output.log + + # Extract test results for summary + TEST_STATUS=$? 
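+          # Note: because the test command above is piped into `tee`, $? at this point
+          # reports tee's exit status rather than go test's; capturing ${PIPESTATUS[0]}
+          # (or enabling `set -o pipefail`) would be needed to surface the test status itself.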
+ TOTAL_TESTS=$(grep -c "=== RUN" test-output.log || echo "0") + PASSED_TESTS=$(grep -c "--- PASS:" test-output.log || echo "0") + FAILED_TESTS=$(grep -c "--- FAIL:" test-output.log || echo "0") + SKIPPED_TESTS=$(grep -c "--- SKIP:" test-output.log || echo "0") + + # Generate test summary + echo "## 🧪 Test Results (Go ${{ matrix.go }})" >> $GITHUB_STEP_SUMMARY + echo "" >> $GITHUB_STEP_SUMMARY + echo "| Metric | Value |" >> $GITHUB_STEP_SUMMARY + echo "|--------|-------|" >> $GITHUB_STEP_SUMMARY + echo "| Total Tests | $TOTAL_TESTS |" >> $GITHUB_STEP_SUMMARY + echo "| Passed | ✅ $PASSED_TESTS |" >> $GITHUB_STEP_SUMMARY + echo "| Failed | ❌ $FAILED_TESTS |" >> $GITHUB_STEP_SUMMARY + echo "| Skipped | ⏭️ $SKIPPED_TESTS |" >> $GITHUB_STEP_SUMMARY + echo "| Status | $([ $TEST_STATUS -eq 0 ] && echo "✅ PASSED" || echo "❌ FAILED") |" >> $GITHUB_STEP_SUMMARY + echo "" >> $GITHUB_STEP_SUMMARY + + # Add package breakdown + echo "### 📦 Package Test Results" >> $GITHUB_STEP_SUMMARY + echo "" >> $GITHUB_STEP_SUMMARY + echo "| Package | Status |" >> $GITHUB_STEP_SUMMARY + echo "|---------|--------|" >> $GITHUB_STEP_SUMMARY + + # Extract package results + grep "^ok\|^FAIL" test-output.log | while read line; do + if [[ $line == ok* ]]; then + pkg=$(echo $line | awk '{print $2}') + echo "| $pkg | ✅ PASS |" >> $GITHUB_STEP_SUMMARY + elif [[ $line == FAIL* ]]; then + pkg=$(echo $line | awk '{print $2}') + echo "| $pkg | ❌ FAIL |" >> $GITHUB_STEP_SUMMARY + fi + done + + echo "" >> $GITHUB_STEP_SUMMARY + + # Add detailed results if tests failed + if [ $TEST_STATUS -ne 0 ]; then + echo "### ❌ Failed Tests Details" >> $GITHUB_STEP_SUMMARY + echo "" >> $GITHUB_STEP_SUMMARY + echo "\`\`\`" >> $GITHUB_STEP_SUMMARY + grep -A 10 "--- FAIL:" test-output.log | head -100 >> $GITHUB_STEP_SUMMARY + echo "\`\`\`" >> $GITHUB_STEP_SUMMARY + echo "" >> $GITHUB_STEP_SUMMARY + fi + + # Set outputs for other steps + echo "test-status=$TEST_STATUS" >> $GITHUB_OUTPUT + echo "total-tests=$TOTAL_TESTS" >> $GITHUB_OUTPUT + echo "passed-tests=$PASSED_TESTS" >> $GITHUB_OUTPUT + echo "failed-tests=$FAILED_TESTS" >> $GITHUB_OUTPUT + + # Exit with the original test status + exit $TEST_STATUS + + - name: Generate coverage report + if: always() + run: | + if [ -f coverage.out ]; then + go tool cover -html=coverage.out -o coverage.html + COVERAGE=$(go tool cover -func=coverage.out | grep total | awk '{print $3}') + + echo "## 📊 Code Coverage (Go ${{ matrix.go }})" >> $GITHUB_STEP_SUMMARY + echo "" >> $GITHUB_STEP_SUMMARY + echo "**Total Coverage: $COVERAGE**" >> $GITHUB_STEP_SUMMARY + echo "" >> $GITHUB_STEP_SUMMARY + + # Add coverage by package + echo "### 📋 Coverage by Package" >> $GITHUB_STEP_SUMMARY + echo "" >> $GITHUB_STEP_SUMMARY + echo "| Package | Coverage |" >> $GITHUB_STEP_SUMMARY + echo "|---------|----------|" >> $GITHUB_STEP_SUMMARY + + go tool cover -func=coverage.out | grep -v total | while read line; do + if [[ $line == *".go:"* ]]; then + pkg=$(echo $line | awk '{print $1}' | cut -d'/' -f1-3) + coverage=$(echo $line | awk '{print $3}') + echo "| $pkg | $coverage |" >> $GITHUB_STEP_SUMMARY + fi + done | sort -u + + echo "" >> $GITHUB_STEP_SUMMARY + else + echo "## ⚠️ Coverage Report" >> $GITHUB_STEP_SUMMARY + echo "No coverage file generated" >> $GITHUB_STEP_SUMMARY + echo "" >> $GITHUB_STEP_SUMMARY + fi + + - name: Upload test artifacts + if: failure() + uses: actions/upload-artifact@v4 + with: + name: test-results-go-${{ matrix.go }} + path: | + test-output.log + coverage.out + coverage.html + retention-days: 7 - name: 
Run go vet - run: go vet ./... + run: | + echo "## 🔍 Static Analysis (Go ${{ matrix.go }})" >> $GITHUB_STEP_SUMMARY + echo "" >> $GITHUB_STEP_SUMMARY + + VET_OUTPUT=$(go vet ./... 2>&1 || echo "") + VET_STATUS=$? + + if [ $VET_STATUS -eq 0 ]; then + echo "✅ **go vet:** No issues found" >> $GITHUB_STEP_SUMMARY + else + echo "❌ **go vet:** Issues found" >> $GITHUB_STEP_SUMMARY + echo "" >> $GITHUB_STEP_SUMMARY + echo "\`\`\`" >> $GITHUB_STEP_SUMMARY + echo "$VET_OUTPUT" >> $GITHUB_STEP_SUMMARY + echo "\`\`\`" >> $GITHUB_STEP_SUMMARY + fi + echo "" >> $GITHUB_STEP_SUMMARY + + exit $VET_STATUS - name: Run go fmt run: | - if [ "$(gofmt -s -l . | wc -l)" -gt 0 ]; then - echo "The following files are not formatted:" - gofmt -s -l . + FMT_OUTPUT=$(gofmt -s -l . 2>&1 || echo "") + + if [ -z "$FMT_OUTPUT" ]; then + echo "✅ **go fmt:** All files properly formatted" >> $GITHUB_STEP_SUMMARY + else + echo "❌ **go fmt:** Files need formatting" >> $GITHUB_STEP_SUMMARY + echo "" >> $GITHUB_STEP_SUMMARY + echo "\`\`\`" >> $GITHUB_STEP_SUMMARY + echo "$FMT_OUTPUT" >> $GITHUB_STEP_SUMMARY + echo "\`\`\`" >> $GITHUB_STEP_SUMMARY + echo "" >> $GITHUB_STEP_SUMMARY exit 1 fi + - name: Job Summary + if: always() + run: | + echo "## 📋 Job Summary (Go ${{ matrix.go }})" >> $GITHUB_STEP_SUMMARY + echo "" >> $GITHUB_STEP_SUMMARY + echo "| Step | Status |" >> $GITHUB_STEP_SUMMARY + echo "|------|--------|" >> $GITHUB_STEP_SUMMARY + echo "| Dependencies | ✅ Success |" >> $GITHUB_STEP_SUMMARY + echo "| Build | ✅ Success |" >> $GITHUB_STEP_SUMMARY + echo "| Tests | ${{ steps.test.outcome == 'success' && '✅ Success' || '❌ Failed' }} |" >> $GITHUB_STEP_SUMMARY + echo "| Coverage | ${{ job.status == 'success' && '✅ Generated' || '⚠️ Partial' }} |" >> $GITHUB_STEP_SUMMARY + echo "| Static Analysis | ${{ job.status == 'success' && '✅ Clean' || '❌ Issues' }} |" >> $GITHUB_STEP_SUMMARY + echo "| Code Formatting | ${{ job.status == 'success' && '✅ Clean' || '❌ Issues' }} |" >> $GITHUB_STEP_SUMMARY + echo "" >> $GITHUB_STEP_SUMMARY + - name: Upload coverage reports to Codecov uses: codecov/codecov-action@v5 with: @@ -81,9 +236,6 @@ jobs: fail-on-severity: moderate comment-summary-in-pr: always - # # Use comma-separated names to pass list arguments: - # deny-licenses: LGPL-2.0, BSD-2-Clause - release: name: Release runs-on: ubuntu-latest @@ -103,7 +255,26 @@ jobs: check-latest: true - name: Run tests - run: go test -v ./... + run: | + echo "## 🚀 Release Tests" >> $GITHUB_STEP_SUMMARY + echo "" >> $GITHUB_STEP_SUMMARY + + go test -v ./... 2>&1 | tee release-test-output.log + TEST_STATUS=$? 
+ + TOTAL_TESTS=$(grep -c "=== RUN" release-test-output.log || echo "0") + PASSED_TESTS=$(grep -c "--- PASS:" release-test-output.log || echo "0") + FAILED_TESTS=$(grep -c "--- FAIL:" release-test-output.log || echo "0") + + echo "| Metric | Value |" >> $GITHUB_STEP_SUMMARY + echo "|--------|-------|" >> $GITHUB_STEP_SUMMARY + echo "| Total Tests | $TOTAL_TESTS |" >> $GITHUB_STEP_SUMMARY + echo "| Passed | ✅ $PASSED_TESTS |" >> $GITHUB_STEP_SUMMARY + echo "| Failed | ❌ $FAILED_TESTS |" >> $GITHUB_STEP_SUMMARY + echo "| Status | $([ $TEST_STATUS -eq 0 ] && echo "✅ PASSED" || echo "❌ FAILED") |" >> $GITHUB_STEP_SUMMARY + echo "" >> $GITHUB_STEP_SUMMARY + + exit $TEST_STATUS - name: Install UPX run: | @@ -112,6 +283,9 @@ jobs: - name: Build binaries run: | + echo "## 🔨 Build Process" >> $GITHUB_STEP_SUMMARY + echo "" >> $GITHUB_STEP_SUMMARY + # Set the build time environment variable BUILD_TIME=$(date -u +'%Y-%m-%dT%H:%M:%SZ') @@ -121,6 +295,12 @@ jobs: # Display help information for the build script ./scripts/build.sh --help + echo "**Build Configuration:**" >> $GITHUB_STEP_SUMMARY + echo "- Version: ${{ github.ref_name }}" >> $GITHUB_STEP_SUMMARY + echo "- Build Time: $BUILD_TIME" >> $GITHUB_STEP_SUMMARY + echo "- Git Commit: ${{ github.sha }}" >> $GITHUB_STEP_SUMMARY + echo "" >> $GITHUB_STEP_SUMMARY + # Build for all platforms ./scripts/build.sh \ --verbose \ @@ -128,13 +308,18 @@ jobs: - name: Compress binaries with UPX run: | + echo "## 📦 Binary Compression" >> $GITHUB_STEP_SUMMARY + echo "" >> $GITHUB_STEP_SUMMARY + echo "Compressing binaries with UPX..." cd build/ # Get original sizes - echo "Original sizes:" - ls -lah - echo "" + echo "**Original sizes:**" >> $GITHUB_STEP_SUMMARY + echo "\`\`\`" >> $GITHUB_STEP_SUMMARY + ls -lah >> $GITHUB_STEP_SUMMARY + echo "\`\`\`" >> $GITHUB_STEP_SUMMARY + echo "" >> $GITHUB_STEP_SUMMARY # Compress all binaries except Darwin (macOS) binaries as UPX doesn't work well with recent macOS versions for binary in articulate-parser-*; do @@ -148,16 +333,16 @@ jobs: fi done - echo "" - echo "Final sizes:" - ls -lah + echo "**Final sizes:**" >> $GITHUB_STEP_SUMMARY + echo "\`\`\`" >> $GITHUB_STEP_SUMMARY + ls -lah >> $GITHUB_STEP_SUMMARY + echo "\`\`\`" >> $GITHUB_STEP_SUMMARY + echo "" >> $GITHUB_STEP_SUMMARY - name: Upload a Build Artifact uses: actions/upload-artifact@v4.6.2 with: - # Artifact name - name: build-artifacts # optional, default is artifact - # A file, directory or wildcard pattern that describes what to upload + name: build-artifacts path: build/ if-no-files-found: ignore retention-days: 1 diff --git a/.gitignore b/.gitignore index d7f4a13..be0964d 100644 --- a/.gitignore +++ b/.gitignore @@ -26,11 +26,37 @@ go.work # End of https://www.toptal.com/developers/gitignore/api/go +# Shit +.github/TODO + # Local test files output/ articulate-sample.json test-output.* go-os-arch-matrix.csv +test_godocx.go +test_input.json # Build artifacts build/ + +# Old workflows +.github/workflows/ci-old.yml +.github/workflows/ci-enhanced.yml + +# Test coverage files +coverage.out +coverage.txt +coverage +*.cover +*.coverprofile + +# Other common exclusions +*.exe +*.exe~ +*.dll +*.so +*.dylib +*.test +*.out +/tmp/ diff --git a/README.md b/README.md index f9c0b7c..7a0d751 100644 --- a/README.md +++ b/README.md @@ -5,10 +5,8 @@ A Go-based parser that converts Articulate Rise e-learning content to various fo [![Go version](https://img.shields.io/github/go-mod/go-version/kjanat/articulate-parser?logo=Go&logoColor=white)][gomod] [![Go 
Doc](https://godoc.org/github.com/kjanat/articulate-parser?status.svg)][Package documentation] [![Go Report Card](https://goreportcard.com/badge/github.com/kjanat/articulate-parser)][Go report] -[![Tag](https://img.shields.io/github/v/tag/kjanat/articulate-parser?sort=semver&label=Tag)][Tags] -[![Release Date](https://img.shields.io/github/release-date/kjanat/articulate-parser?label=Release%20date)][Latest release] -[![License](https://img.shields.io/github/license/kjanat/articulate-parser?label=License)](LICENSE) -[![Commit activity](https://img.shields.io/github/commit-activity/m/kjanat/articulate-parser?label=Commit%20activity)][Commits] +[![Tag](https://img.shields.io/github/v/tag/kjanat/articulate-parser?sort=semver&label=Tag)][Tags] +[![License](https://img.shields.io/github/license/kjanat/articulate-parser?label=License)][MIT License] [![Last commit](https://img.shields.io/github/last-commit/kjanat/articulate-parser?label=Last%20commit)][Commits] [![GitHub Issues or Pull Requests](https://img.shields.io/github/issues/kjanat/articulate-parser?label=Issues)][Issues] [![CI](https://img.shields.io/github/actions/workflow/status/kjanat/articulate-parser/ci.yml?logo=github&label=CI)][Build] @@ -29,20 +27,50 @@ A Go-based parser that converts Articulate Rise e-learning content to various fo ## Installation -1. Ensure you have Go 1.21 or later installed -2. Clone or download the parser code -3. Initialize the Go module: +### Prerequisites + +- Go, I don't know the version, but I use go1.24.2 right now, and it works, see the [CI][Build] workflow where it is tested. + +### Install from source ```bash -go mod init articulate-parser -go mod tidy +git clone https://github.com/kjanat/articulate-parser.git +cd articulate-parser +go mod download +go build -o articulate-parser main.go +``` + +### Or install directly + +```bash +go install github.com/kjanat/articulate-parser@latest ``` ## Dependencies The parser uses the following external library: -- `github.com/unidoc/unioffice` - For creating Word documents +- `github.com/fumiama/go-docx` - For creating Word documents (MIT license) + +## Testing + +Run the test suite: + +```bash +go test ./... +``` + +Run tests with coverage: + +```bash +go test -v -race -coverprofile=coverage.out ./... +``` + +View coverage report: + +```bash +go tool cover -html=coverage.out +``` ## Usage @@ -54,9 +82,11 @@ go run main.go [output_path] #### Parameters -- `input_uri_or_file`: Either an Articulate Rise share URL or path to a local JSON file -- `output_format`: `md` for Markdown or `docx` for Word Document -- `output_path`: Optional. If not provided, files are saved to `./output/` directory +| Parameter | Description | Default | +| ------------------- | ---------------------------------------------------------------- | --------------- | +| `input_uri_or_file` | Either an Articulate Rise share URL or path to a local JSON file | None (required) | +| `output_format` | `md` for Markdown or `docx` for Word Document | None (required) | +| `output_path` | Path where output file will be saved. | `./output/` | #### Examples @@ -75,7 +105,7 @@ go run main.go "articulate-sample.json" docx "my-course.docx" 3. 
**Parse from local file and export to Markdown:** ```bash -go run main.go "C:\Users\kjana\Projects\articulate-parser\articulate-sample.json" md +go run main.go "articulate-sample.json" md "output.md" ``` ### Building the Executable @@ -92,9 +122,29 @@ Then run: ./articulate-parser input.json md output.md ``` +## Development + +### Code Quality + +The project maintains high code quality standards: + +- Cyclomatic complexity ≤ 15 (checked with [gocyclo](https://github.com/fzipp/gocyclo)) +- Race condition detection enabled +- Comprehensive test coverage +- Code formatting with `gofmt` +- Static analysis with `go vet` + +### Contributing + +1. Fork the repository +2. Create a feature branch +3. Make your changes +4. Run tests: `go test ./...` +5. Submit a pull request + ## Output Formats -### Markdown (.md) +### Markdown (`.md`) - Hierarchical structure with proper heading levels - Clean text content with HTML tags removed @@ -103,7 +153,7 @@ Then run: - Media references included - Course metadata at the top -### Word Document (.docx) +### Word Document (`.docx`) - Professional document formatting - Bold headings and proper typography @@ -167,6 +217,13 @@ The parser includes error handling for: - Styling and visual formatting is not preserved - Assessment logic and interactivity is lost in static exports +## Performance + +- Lightweight with minimal dependencies +- Fast JSON parsing and export +- Memory efficient processing +- No external license requirements + ## Future Enhancements Potential improvements could include: @@ -188,6 +245,7 @@ This is a utility tool for educational content conversion. Please ensure you hav [Go report]: https://goreportcard.com/report/github.com/kjanat/articulate-parser [gomod]: go.mod [Issues]: https://github.com/kjanat/articulate-parser/issues -[Latest release]: https://github.com/kjanat/articulate-parser/releases/latest + +[MIT License]: LICENSE [Package documentation]: https://godoc.org/github.com/kjanat/articulate-parser [Tags]: https://github.com/kjanat/articulate-parser/tags diff --git a/go.mod b/go.mod index 23a7d1f..65c2d6d 100644 --- a/go.mod +++ b/go.mod @@ -1,7 +1,10 @@ module github.com/kjanat/articulate-parser -go 1.21 +go 1.23.0 -require github.com/unidoc/unioffice v1.39.0 +require github.com/fumiama/go-docx v0.0.0-20250506085032-0c30fd09304b -require github.com/richardlehane/msoleps v1.0.4 // indirect +require ( + github.com/fumiama/imgsz v0.0.4 // indirect + golang.org/x/image v0.27.0 // indirect +) diff --git a/go.sum b/go.sum index 13e48e6..e3ad5ee 100644 --- a/go.sum +++ b/go.sum @@ -1,6 +1,6 @@ -github.com/richardlehane/msoleps v1.0.3 h1:aznSZzrwYRl3rLKRT3gUk9am7T/mLNSnJINvN0AQoVM= -github.com/richardlehane/msoleps v1.0.3/go.mod h1:BWev5JBpU9Ko2WAgmZEuiz4/u3ZYTKbjLycmwiWUfWg= -github.com/richardlehane/msoleps v1.0.4 h1:WuESlvhX3gH2IHcd8UqyCuFY5yiq/GR/yqaSM/9/g00= -github.com/richardlehane/msoleps v1.0.4/go.mod h1:BWev5JBpU9Ko2WAgmZEuiz4/u3ZYTKbjLycmwiWUfWg= -github.com/unidoc/unioffice v1.39.0 h1:Wo5zvrzCqhyK/1Zi5dg8a5F5+NRftIMZPnFPYwruLto= -github.com/unidoc/unioffice v1.39.0/go.mod h1:Axz6ltIZZTUUyHoEnPe4Mb3VmsN4TRHT5iZCGZ1rgnU= +github.com/fumiama/go-docx v0.0.0-20250506085032-0c30fd09304b h1:/mxSugRc4SgN7XgBtT19dAJ7cAXLTbPmlJLJE4JjRkE= +github.com/fumiama/go-docx v0.0.0-20250506085032-0c30fd09304b/go.mod h1:ssRF0IaB1hCcKIObp3FkZOsjTcAHpgii70JelNb4H8M= +github.com/fumiama/imgsz v0.0.4 h1:Lsasu2hdSSFS+vnD+nvR1UkiRMK7hcpyYCC0FzgSMFI= +github.com/fumiama/imgsz v0.0.4/go.mod h1:bISOQVTlw9sRytPwe8ir7tAaEmyz9hSNj9n8mXMBG0E= +golang.org/x/image 
v0.27.0 h1:C8gA4oWU/tKkdCfYT6T2u4faJu3MeNS5O8UPWlPF61w= +golang.org/x/image v0.27.0/go.mod h1:xbdrClrAUway1MUTEZDq9mz/UpRwYAkFFNUslZtcB+g= diff --git a/internal/exporters/docx.go b/internal/exporters/docx.go index ce65b42..a6488eb 100644 --- a/internal/exporters/docx.go +++ b/internal/exporters/docx.go @@ -4,17 +4,18 @@ package exporters import ( "fmt" + "os" "strings" + "github.com/fumiama/go-docx" "github.com/kjanat/articulate-parser/internal/interfaces" "github.com/kjanat/articulate-parser/internal/models" "github.com/kjanat/articulate-parser/internal/services" - "github.com/unidoc/unioffice/document" ) // DocxExporter implements the Exporter interface for DOCX format. // It converts Articulate Rise course data into a Microsoft Word document -// using the unioffice/document package. +// using the go-docx package. type DocxExporter struct { // htmlCleaner is used to convert HTML content to plain text htmlCleaner *services.HTMLCleaner @@ -45,21 +46,17 @@ func NewDocxExporter(htmlCleaner *services.HTMLCleaner) interfaces.Exporter { // Returns: // - An error if creating or saving the document fails func (e *DocxExporter) Export(course *models.Course, outputPath string) error { - doc := document.New() + doc := docx.New() // Add title titlePara := doc.AddParagraph() - titleRun := titlePara.AddRun() - titleRun.AddText(course.Course.Title) - titleRun.Properties().SetBold(true) - titleRun.Properties().SetSize(16) + titlePara.AddText(course.Course.Title).Size("32").Bold() // Add description if available if course.Course.Description != "" { descPara := doc.AddParagraph() - descRun := descPara.AddRun() cleanDesc := e.htmlCleaner.CleanHTML(course.Course.Description) - descRun.AddText(cleanDesc) + descPara.AddText(cleanDesc) } // Add each lesson @@ -72,7 +69,20 @@ func (e *DocxExporter) Export(course *models.Course, outputPath string) error { outputPath = outputPath + ".docx" } - return doc.SaveToFile(outputPath) + // Create the file + file, err := os.Create(outputPath) + if err != nil { + return fmt.Errorf("failed to create output file: %w", err) + } + defer file.Close() + + // Save the document + _, err = doc.WriteTo(file) + if err != nil { + return fmt.Errorf("failed to save document: %w", err) + } + + return nil } // exportLesson adds a lesson to the document with appropriate formatting. 
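For reference, the go-docx calls introduced above (docx.New, AddParagraph, AddText with the chained Size/Bold options, and WriteTo) compose into a complete program roughly as follows. This is a minimal sketch, not part of the patch; the output filename and text are placeholders, and sizes are half-points, so "32" corresponds to the 16 pt title used by the exporter.

```go
package main

import (
	"os"

	"github.com/fumiama/go-docx"
)

func main() {
	doc := docx.New()

	// Title: "32" is a half-point size, i.e. 16 pt, rendered bold.
	doc.AddParagraph().AddText("Example Course").Size("32").Bold()

	// Plain description paragraph.
	doc.AddParagraph().AddText("A short course description.")

	// Persist the document, mirroring Export above: create the file,
	// then stream the docx content into it.
	file, err := os.Create("example.docx") // placeholder path
	if err != nil {
		panic(err)
	}
	defer file.Close()

	if _, err := doc.WriteTo(file); err != nil {
		panic(err)
	}
}
```

The exporter itself additionally appends a `.docx` extension when the output path lacks one, as shown in the hunk above.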
@@ -81,20 +91,16 @@ func (e *DocxExporter) Export(course *models.Course, outputPath string) error { // Parameters: // - doc: The Word document being created // - lesson: The lesson data model to export -func (e *DocxExporter) exportLesson(doc *document.Document, lesson *models.Lesson) { +func (e *DocxExporter) exportLesson(doc *docx.Docx, lesson *models.Lesson) { // Add lesson title lessonPara := doc.AddParagraph() - lessonRun := lessonPara.AddRun() - lessonRun.AddText(fmt.Sprintf("Lesson: %s", lesson.Title)) - lessonRun.Properties().SetBold(true) - lessonRun.Properties().SetSize(14) + lessonPara.AddText(fmt.Sprintf("Lesson: %s", lesson.Title)).Size("28").Bold() // Add lesson description if available if lesson.Description != "" { descPara := doc.AddParagraph() - descRun := descPara.AddRun() cleanDesc := e.htmlCleaner.CleanHTML(lesson.Description) - descRun.AddText(cleanDesc) + descPara.AddText(cleanDesc) } // Add each item in the lesson @@ -109,14 +115,11 @@ func (e *DocxExporter) exportLesson(doc *document.Document, lesson *models.Lesso // Parameters: // - doc: The Word document being created // - item: The item data model to export -func (e *DocxExporter) exportItem(doc *document.Document, item *models.Item) { +func (e *DocxExporter) exportItem(doc *docx.Docx, item *models.Item) { // Add item type as heading if item.Type != "" { itemPara := doc.AddParagraph() - itemRun := itemPara.AddRun() - itemRun.AddText(strings.Title(item.Type)) - itemRun.Properties().SetBold(true) - itemRun.Properties().SetSize(12) + itemPara.AddText(strings.Title(item.Type)).Size("24").Bold() } // Add sub-items @@ -132,58 +135,48 @@ func (e *DocxExporter) exportItem(doc *document.Document, item *models.Item) { // Parameters: // - doc: The Word document being created // - subItem: The sub-item data model to export -func (e *DocxExporter) exportSubItem(doc *document.Document, subItem *models.SubItem) { +func (e *DocxExporter) exportSubItem(doc *docx.Docx, subItem *models.SubItem) { // Add title if available if subItem.Title != "" { subItemPara := doc.AddParagraph() - subItemRun := subItemPara.AddRun() - subItemRun.AddText(" " + subItem.Title) // Indented - subItemRun.Properties().SetBold(true) + subItemPara.AddText(" " + subItem.Title).Bold() // Indented } // Add heading if available if subItem.Heading != "" { headingPara := doc.AddParagraph() - headingRun := headingPara.AddRun() cleanHeading := e.htmlCleaner.CleanHTML(subItem.Heading) - headingRun.AddText(" " + cleanHeading) // Indented - headingRun.Properties().SetBold(true) + headingPara.AddText(" " + cleanHeading).Bold() // Indented } // Add paragraph content if available if subItem.Paragraph != "" { contentPara := doc.AddParagraph() - contentRun := contentPara.AddRun() cleanContent := e.htmlCleaner.CleanHTML(subItem.Paragraph) - contentRun.AddText(" " + cleanContent) // Indented + contentPara.AddText(" " + cleanContent) // Indented } // Add answers if this is a question if len(subItem.Answers) > 0 { answersPara := doc.AddParagraph() - answersRun := answersPara.AddRun() - answersRun.AddText(" Answers:") - answersRun.Properties().SetBold(true) + answersPara.AddText(" Answers:").Bold() for i, answer := range subItem.Answers { answerPara := doc.AddParagraph() - answerRun := answerPara.AddRun() prefix := fmt.Sprintf(" %d. 
", i+1) if answer.Correct { prefix += "✓ " } cleanAnswer := e.htmlCleaner.CleanHTML(answer.Title) - answerRun.AddText(prefix + cleanAnswer) + answerPara.AddText(prefix + cleanAnswer) } } // Add feedback if available if subItem.Feedback != "" { feedbackPara := doc.AddParagraph() - feedbackRun := feedbackPara.AddRun() cleanFeedback := e.htmlCleaner.CleanHTML(subItem.Feedback) - feedbackRun.AddText(" Feedback: " + cleanFeedback) - feedbackRun.Properties().SetItalic(true) + feedbackPara.AddText(" Feedback: " + cleanFeedback).Italic() } } diff --git a/internal/exporters/docx_test.go b/internal/exporters/docx_test.go new file mode 100644 index 0000000..d16e4ee --- /dev/null +++ b/internal/exporters/docx_test.go @@ -0,0 +1,679 @@ +// Package exporters_test provides tests for the docx exporter. +package exporters + +import ( + "os" + "path/filepath" + "testing" + + "github.com/kjanat/articulate-parser/internal/models" + "github.com/kjanat/articulate-parser/internal/services" +) + +// TestNewDocxExporter tests the NewDocxExporter constructor. +func TestNewDocxExporter(t *testing.T) { + htmlCleaner := services.NewHTMLCleaner() + exporter := NewDocxExporter(htmlCleaner) + + if exporter == nil { + t.Fatal("NewDocxExporter() returned nil") + } + + // Type assertion to check internal structure + docxExporter, ok := exporter.(*DocxExporter) + if !ok { + t.Fatal("NewDocxExporter() returned wrong type") + } + + if docxExporter.htmlCleaner == nil { + t.Error("htmlCleaner should not be nil") + } +} + +// TestDocxExporter_GetSupportedFormat tests the GetSupportedFormat method. +func TestDocxExporter_GetSupportedFormat(t *testing.T) { + htmlCleaner := services.NewHTMLCleaner() + exporter := NewDocxExporter(htmlCleaner) + + expected := "docx" + result := exporter.GetSupportedFormat() + + if result != expected { + t.Errorf("Expected format '%s', got '%s'", expected, result) + } +} + +// TestDocxExporter_Export tests the Export method. +func TestDocxExporter_Export(t *testing.T) { + htmlCleaner := services.NewHTMLCleaner() + exporter := NewDocxExporter(htmlCleaner) + + // Create test course + testCourse := createTestCourseForDocx() + + // Create temporary directory and file + tempDir := t.TempDir() + outputPath := filepath.Join(tempDir, "test-course.docx") + + // Test successful export + err := exporter.Export(testCourse, outputPath) + if err != nil { + t.Fatalf("Export failed: %v", err) + } + + // Check that file was created + if _, err := os.Stat(outputPath); os.IsNotExist(err) { + t.Fatal("Output file was not created") + } + + // Verify file has some content (basic check) + fileInfo, err := os.Stat(outputPath) + if err != nil { + t.Fatalf("Failed to get file info: %v", err) + } + + if fileInfo.Size() == 0 { + t.Error("Output file is empty") + } +} + +// TestDocxExporter_Export_AddDocxExtension tests that the .docx extension is added automatically. 
+func TestDocxExporter_Export_AddDocxExtension(t *testing.T) { + htmlCleaner := services.NewHTMLCleaner() + exporter := NewDocxExporter(htmlCleaner) + + testCourse := createTestCourseForDocx() + + // Create temporary directory and file without .docx extension + tempDir := t.TempDir() + outputPath := filepath.Join(tempDir, "test-course") + + err := exporter.Export(testCourse, outputPath) + if err != nil { + + t.Fatalf("Export failed: %v", err) + } + + // Check that file was created with .docx extension + expectedPath := outputPath + ".docx" + if _, err := os.Stat(expectedPath); os.IsNotExist(err) { + t.Fatal("Output file with .docx extension was not created") + } +} + +// TestDocxExporter_Export_InvalidPath tests export with invalid output path. +func TestDocxExporter_Export_InvalidPath(t *testing.T) { + htmlCleaner := services.NewHTMLCleaner() + exporter := NewDocxExporter(htmlCleaner) + + testCourse := createTestCourseForDocx() + + // Try to write to invalid path + invalidPath := "/invalid/path/that/does/not/exist/file.docx" + err := exporter.Export(testCourse, invalidPath) + + if err == nil { + t.Fatal("Expected error for invalid path, got nil") + } +} + +// TestDocxExporter_ExportLesson tests the exportLesson method indirectly through Export. +func TestDocxExporter_ExportLesson(t *testing.T) { + htmlCleaner := services.NewHTMLCleaner() + exporter := NewDocxExporter(htmlCleaner) + + // Create course with specific lesson content + course := &models.Course{ + ShareID: "test-id", + Course: models.CourseInfo{ + ID: "test-course", + Title: "Test Course", + Lessons: []models.Lesson{ + { + ID: "lesson-1", + Title: "Test Lesson", + Type: "lesson", + Description: "

Test lesson description with bold text.

", + Items: []models.Item{ + { + Type: "text", + Items: []models.SubItem{ + { + Title: "Test Item Title", + Paragraph: "

Test paragraph content.

", + }, + }, + }, + }, + }, + }, + }, + } + + tempDir := t.TempDir() + outputPath := filepath.Join(tempDir, "lesson-test.docx") + + err := exporter.Export(course, outputPath) + if err != nil { + + t.Fatalf("Export failed: %v", err) + } + + // Verify file was created successfully + if _, err := os.Stat(outputPath); os.IsNotExist(err) { + t.Fatal("Output file was not created") + } +} + +// TestDocxExporter_ExportItem tests the exportItem method indirectly through Export. +func TestDocxExporter_ExportItem(t *testing.T) { + htmlCleaner := services.NewHTMLCleaner() + exporter := NewDocxExporter(htmlCleaner) + + // Create course with different item types + course := &models.Course{ + ShareID: "test-id", + Course: models.CourseInfo{ + ID: "test-course", + Title: "Item Test Course", + Lessons: []models.Lesson{ + { + ID: "lesson-1", + Title: "Item Types Lesson", + Type: "lesson", + Items: []models.Item{ + { + Type: "text", + Items: []models.SubItem{ + { + Title: "Text Item", + Paragraph: "

Text content

", + }, + }, + }, + { + Type: "list", + Items: []models.SubItem{ + {Paragraph: "

List item 1

"}, + {Paragraph: "

List item 2

"}, + }, + }, + { + Type: "knowledgeCheck", + Items: []models.SubItem{ + { + Title: "

What is the answer?

", + Answers: []models.Answer{ + {Title: "Option A", Correct: false}, + {Title: "Option B", Correct: true}, + }, + Feedback: "

Correct answer explanation

", + }, + }, + }, + }, + }, + }, + }, + } + + tempDir := t.TempDir() + outputPath := filepath.Join(tempDir, "items-test.docx") + + err := exporter.Export(course, outputPath) + if err != nil { + + t.Fatalf("Export failed: %v", err) + } + + // Verify file was created successfully + if _, err := os.Stat(outputPath); os.IsNotExist(err) { + t.Fatal("Output file was not created") + } +} + +// TestDocxExporter_ExportSubItem tests the exportSubItem method indirectly through Export. +func TestDocxExporter_ExportSubItem(t *testing.T) { + htmlCleaner := services.NewHTMLCleaner() + exporter := NewDocxExporter(htmlCleaner) + + // Create course with sub-item containing all possible fields + course := &models.Course{ + ShareID: "test-id", + Course: models.CourseInfo{ + ID: "test-course", + Title: "SubItem Test Course", + Lessons: []models.Lesson{ + { + ID: "lesson-1", + Title: "SubItem Test Lesson", + Type: "lesson", + Items: []models.Item{ + { + Type: "knowledgeCheck", + Items: []models.SubItem{ + { + Title: "

Question Title

", + Heading: "

Question Heading

", + Paragraph: "

Question description with emphasis.

", + Answers: []models.Answer{ + {Title: "Wrong answer", Correct: false}, + {Title: "Correct answer", Correct: true}, + {Title: "Another wrong answer", Correct: false}, + }, + Feedback: "

Feedback with formatting.

", + }, + }, + }, + }, + }, + }, + }, + } + + tempDir := t.TempDir() + outputPath := filepath.Join(tempDir, "subitem-test.docx") + + err := exporter.Export(course, outputPath) + if err != nil { + + t.Fatalf("Export failed: %v", err) + } + + // Verify file was created successfully + if _, err := os.Stat(outputPath); os.IsNotExist(err) { + t.Fatal("Output file was not created") + } +} + +// TestDocxExporter_ComplexCourse tests export of a complex course structure. +func TestDocxExporter_ComplexCourse(t *testing.T) { + htmlCleaner := services.NewHTMLCleaner() + exporter := NewDocxExporter(htmlCleaner) + + // Create complex test course + course := &models.Course{ + ShareID: "complex-test-id", + Course: models.CourseInfo{ + ID: "complex-course", + Title: "Complex Test Course", + Description: "

This is a complex course description with formatting.

", + Lessons: []models.Lesson{ + { + ID: "section-1", + Title: "Course Section", + Type: "section", + }, + { + ID: "lesson-1", + Title: "Introduction Lesson", + Type: "lesson", + Description: "

Introduction to the course with code and links.

", + Items: []models.Item{ + { + Type: "text", + Items: []models.SubItem{ + { + Heading: "

Welcome

", + Paragraph: "

Welcome to our comprehensive course!

", + }, + }, + }, + { + Type: "list", + Items: []models.SubItem{ + {Paragraph: "

Learn advanced concepts

"}, + {Paragraph: "

Practice with real examples

"}, + {Paragraph: "

Apply knowledge in projects

"}, + }, + }, + { + Type: "multimedia", + Items: []models.SubItem{ + { + Title: "

Video Introduction

", + Caption: "

Watch this introductory video

", + Media: &models.Media{ + Video: &models.VideoMedia{ + OriginalUrl: "https://example.com/intro.mp4", + Duration: 300, + }, + }, + }, + }, + }, + { + Type: "knowledgeCheck", + Items: []models.SubItem{ + { + Title: "

What will you learn in this course?

", + Answers: []models.Answer{ + {Title: "Basic concepts only", Correct: false}, + {Title: "Advanced concepts and practical application", Correct: true}, + {Title: "Theory without practice", Correct: false}, + }, + Feedback: "

Excellent! This course covers both theory and practice.

", + }, + }, + }, + { + Type: "image", + Items: []models.SubItem{ + { + Caption: "

Course overview diagram

", + Media: &models.Media{ + Image: &models.ImageMedia{ + OriginalUrl: "https://example.com/overview.png", + }, + }, + }, + }, + }, + { + Type: "interactive", + Items: []models.SubItem{ + { + Title: "

Interactive Exercise

", + }, + }, + }, + }, + }, + { + ID: "lesson-2", + Title: "Advanced Topics", + Type: "lesson", + Items: []models.Item{ + { + Type: "divider", + }, + { + Type: "unknown", + Items: []models.SubItem{ + { + Title: "

Custom Content

", + Paragraph: "

This is a custom content type

", + }, + }, + }, + }, + }, + }, + }, + } + + // Create temporary output file + tempDir := t.TempDir() + outputPath := filepath.Join(tempDir, "complex-course.docx") + + // Export course + err := exporter.Export(course, outputPath) + if err != nil { + + t.Fatalf("Export failed: %v", err) + } + + // Verify file was created and has reasonable size + fileInfo, err := os.Stat(outputPath) + if err != nil { + t.Fatalf("Failed to get file info: %v", err) + } + + if fileInfo.Size() < 1000 { + t.Error("Output file seems too small for complex course content") + } +} + +// TestDocxExporter_EmptyCourse tests export of an empty course. +func TestDocxExporter_EmptyCourse(t *testing.T) { + htmlCleaner := services.NewHTMLCleaner() + exporter := NewDocxExporter(htmlCleaner) + + // Create minimal course + course := &models.Course{ + ShareID: "empty-id", + Course: models.CourseInfo{ + ID: "empty-course", + Title: "Empty Course", + Lessons: []models.Lesson{}, + }, + } + + tempDir := t.TempDir() + outputPath := filepath.Join(tempDir, "empty-course.docx") + + err := exporter.Export(course, outputPath) + if err != nil { + + t.Fatalf("Export failed: %v", err) + } + + // Verify file was created + if _, err := os.Stat(outputPath); os.IsNotExist(err) { + t.Fatal("Output file was not created") + } +} + +// TestDocxExporter_HTMLCleaning tests that HTML content is properly cleaned. +func TestDocxExporter_HTMLCleaning(t *testing.T) { + htmlCleaner := services.NewHTMLCleaner() + exporter := NewDocxExporter(htmlCleaner) + + // Create course with HTML content that needs cleaning + course := &models.Course{ + ShareID: "html-test-id", + Course: models.CourseInfo{ + ID: "html-test-course", + Title: "HTML Cleaning Test", + Description: "

Description with and bold text.

", + Lessons: []models.Lesson{ + { + ID: "lesson-1", + Title: "Test Lesson", + Type: "lesson", + Description: "
Lesson description with styled content.
", + Items: []models.Item{ + { + Type: "text", + Items: []models.SubItem{ + { + Heading: "

Heading with emphasis and & entities

", + Paragraph: "

Paragraph with <code> entities and formatting.

", + }, + }, + }, + }, + }, + }, + }, + } + + tempDir := t.TempDir() + outputPath := filepath.Join(tempDir, "html-cleaning-test.docx") + + err := exporter.Export(course, outputPath) + if err != nil { + + t.Fatalf("Export failed: %v", err) + } + + // Verify file was created (basic check that HTML cleaning didn't break export) + if _, err := os.Stat(outputPath); os.IsNotExist(err) { + t.Fatal("Output file was not created") + } +} + +// TestDocxExporter_ExistingDocxExtension tests that existing .docx extension is preserved. +func TestDocxExporter_ExistingDocxExtension(t *testing.T) { + htmlCleaner := services.NewHTMLCleaner() + exporter := NewDocxExporter(htmlCleaner) + + testCourse := createTestCourseForDocx() + + // Use path that already has .docx extension + tempDir := t.TempDir() + outputPath := filepath.Join(tempDir, "test-course.docx") + + err := exporter.Export(testCourse, outputPath) + if err != nil { + + t.Fatalf("Export failed: %v", err) + } + + // Check that file was created at the exact path (no double extension) + if _, err := os.Stat(outputPath); os.IsNotExist(err) { + t.Fatal("Output file was not created at expected path") + } + + // Ensure no double extension was created + doubleExtensionPath := outputPath + ".docx" + if _, err := os.Stat(doubleExtensionPath); err == nil { + t.Error("Double .docx extension file should not exist") + } +} + +// TestDocxExporter_CaseInsensitiveExtension tests that extension checking is case-insensitive. +func TestDocxExporter_CaseInsensitiveExtension(t *testing.T) { + htmlCleaner := services.NewHTMLCleaner() + exporter := NewDocxExporter(htmlCleaner) + + testCourse := createTestCourseForDocx() + + // Test various case combinations + testCases := []string{ + "test-course.DOCX", + "test-course.Docx", + "test-course.DocX", + } + + for i, testCase := range testCases { + tempDir := t.TempDir() + outputPath := filepath.Join(tempDir, testCase) + + err := exporter.Export(testCourse, outputPath) + if err != nil { + + t.Fatalf("Export failed for case %d (%s): %v", i, testCase, err) + } + + // Check that file was created at the exact path (no additional extension) + if _, err := os.Stat(outputPath); os.IsNotExist(err) { + t.Fatalf("Output file was not created at expected path for case %d (%s)", i, testCase) + } + } +} + +// createTestCourseForDocx creates a test course for DOCX export testing. +func createTestCourseForDocx() *models.Course { + return &models.Course{ + ShareID: "test-share-id", + Course: models.CourseInfo{ + ID: "test-course-id", + Title: "Test Course", + Description: "

Test course description with formatting.

", + Lessons: []models.Lesson{ + { + ID: "section-1", + Title: "Test Section", + Type: "section", + }, + { + ID: "lesson-1", + Title: "Test Lesson", + Type: "lesson", + Description: "

Test lesson description

", + Items: []models.Item{ + { + Type: "text", + Items: []models.SubItem{ + { + Heading: "

Test Heading

", + Paragraph: "

Test paragraph content.

", + }, + }, + }, + { + Type: "list", + Items: []models.SubItem{ + {Paragraph: "

First list item

"}, + {Paragraph: "

Second list item

"}, + }, + }, + }, + }, + }, + }, + } +} + +// BenchmarkDocxExporter_Export benchmarks the Export method. +func BenchmarkDocxExporter_Export(b *testing.B) { + htmlCleaner := services.NewHTMLCleaner() + exporter := NewDocxExporter(htmlCleaner) + course := createTestCourseForDocx() + + // Create temporary directory + tempDir := b.TempDir() + + b.ResetTimer() + for i := 0; i < b.N; i++ { + outputPath := filepath.Join(tempDir, "benchmark-course.docx") + _ = exporter.Export(course, outputPath) + // Clean up for next iteration + os.Remove(outputPath) + } +} + +// BenchmarkDocxExporter_ComplexCourse benchmarks export of a complex course. +func BenchmarkDocxExporter_ComplexCourse(b *testing.B) { + htmlCleaner := services.NewHTMLCleaner() + exporter := NewDocxExporter(htmlCleaner) + + // Create complex course for benchmarking + course := &models.Course{ + ShareID: "benchmark-id", + Course: models.CourseInfo{ + ID: "benchmark-course", + Title: "Benchmark Course", + Description: "

Complex course for performance testing

", + Lessons: make([]models.Lesson, 10), // 10 lessons + }, + } + + // Fill with test data + for i := 0; i < 10; i++ { + lesson := models.Lesson{ + ID: "lesson-" + string(rune(i)), + Title: "Lesson " + string(rune(i)), + Type: "lesson", + Items: make([]models.Item, 5), // 5 items per lesson + } + + for j := 0; j < 5; j++ { + item := models.Item{ + Type: "text", + Items: make([]models.SubItem, 3), // 3 sub-items per item + } + + for k := 0; k < 3; k++ { + item.Items[k] = models.SubItem{ + Heading: "

Heading " + string(rune(k)) + "

", + Paragraph: "

Paragraph content with formatting for performance testing.

", + } + } + + lesson.Items[j] = item + } + + course.Course.Lessons[i] = lesson + } + + tempDir := b.TempDir() + + b.ResetTimer() + for i := 0; i < b.N; i++ { + outputPath := filepath.Join(tempDir, "benchmark-complex.docx") + _ = exporter.Export(course, outputPath) + os.Remove(outputPath) + } +} diff --git a/internal/exporters/factory_test.go b/internal/exporters/factory_test.go new file mode 100644 index 0000000..d56de7a --- /dev/null +++ b/internal/exporters/factory_test.go @@ -0,0 +1,444 @@ +// Package exporters_test provides tests for the exporter factory. +package exporters + +import ( + "reflect" + "sort" + "strings" + "testing" + + "github.com/kjanat/articulate-parser/internal/services" +) + +// TestNewFactory tests the NewFactory constructor. +func TestNewFactory(t *testing.T) { + htmlCleaner := services.NewHTMLCleaner() + factory := NewFactory(htmlCleaner) + + if factory == nil { + t.Fatal("NewFactory() returned nil") + } + + // Type assertion to check internal structure + factoryImpl, ok := factory.(*Factory) + if !ok { + t.Fatal("NewFactory() returned wrong type") + } + + if factoryImpl.htmlCleaner == nil { + t.Error("htmlCleaner should not be nil") + } +} + +// TestFactory_CreateExporter tests the CreateExporter method for all supported formats. +func TestFactory_CreateExporter(t *testing.T) { + htmlCleaner := services.NewHTMLCleaner() + factory := NewFactory(htmlCleaner) + + testCases := []struct { + name string + format string + expectedType string + expectedFormat string + shouldError bool + }{ + { + name: "markdown format", + format: "markdown", + expectedType: "*exporters.MarkdownExporter", + expectedFormat: "markdown", + shouldError: false, + }, + { + name: "md format alias", + format: "md", + expectedType: "*exporters.MarkdownExporter", + expectedFormat: "markdown", + shouldError: false, + }, + { + name: "docx format", + format: "docx", + expectedType: "*exporters.DocxExporter", + expectedFormat: "docx", + shouldError: false, + }, + { + name: "word format alias", + format: "word", + expectedType: "*exporters.DocxExporter", + expectedFormat: "docx", + shouldError: false, + }, + { + name: "unsupported format", + format: "pdf", + shouldError: true, + }, + { + name: "empty format", + format: "", + shouldError: true, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + exporter, err := factory.CreateExporter(tc.format) + + if tc.shouldError { + if err == nil { + t.Errorf("Expected error for format '%s', but got nil", tc.format) + } + if exporter != nil { + t.Errorf("Expected nil exporter for unsupported format '%s'", tc.format) + } + return + } + + if err != nil { + t.Fatalf("Unexpected error creating exporter for format '%s': %v", tc.format, err) + } + + if exporter == nil { + t.Fatalf("CreateExporter returned nil for supported format '%s'", tc.format) + } + + // Check type + exporterType := reflect.TypeOf(exporter).String() + if exporterType != tc.expectedType { + t.Errorf("Expected exporter type '%s' for format '%s', got '%s'", tc.expectedType, tc.format, exporterType) + } + + // Check supported format + supportedFormat := exporter.GetSupportedFormat() + if supportedFormat != tc.expectedFormat { + t.Errorf("Expected supported format '%s' for format '%s', got '%s'", tc.expectedFormat, tc.format, supportedFormat) + } + }) + } +} + +// TestFactory_CreateExporter_CaseInsensitive tests that format strings are case-insensitive. 
+func TestFactory_CreateExporter_CaseInsensitive(t *testing.T) { + htmlCleaner := services.NewHTMLCleaner() + factory := NewFactory(htmlCleaner) + + testCases := []struct { + format string + expectedFormat string + }{ + {"MARKDOWN", "markdown"}, + {"Markdown", "markdown"}, + {"MarkDown", "markdown"}, + {"MD", "markdown"}, + {"Md", "markdown"}, + {"DOCX", "docx"}, + {"Docx", "docx"}, + {"DocX", "docx"}, + {"WORD", "docx"}, + {"Word", "docx"}, + {"WoRd", "docx"}, + } + + for _, tc := range testCases { + t.Run(tc.format, func(t *testing.T) { + exporter, err := factory.CreateExporter(tc.format) + + if err != nil { + t.Fatalf("Unexpected error for format '%s': %v", tc.format, err) + } + + if exporter == nil { + t.Fatalf("CreateExporter returned nil for format '%s'", tc.format) + } + + supportedFormat := exporter.GetSupportedFormat() + if supportedFormat != tc.expectedFormat { + t.Errorf("Expected supported format '%s' for format '%s', got '%s'", tc.expectedFormat, tc.format, supportedFormat) + } + }) + } +} + +// TestFactory_CreateExporter_ErrorMessages tests error messages for unsupported formats. +func TestFactory_CreateExporter_ErrorMessages(t *testing.T) { + htmlCleaner := services.NewHTMLCleaner() + factory := NewFactory(htmlCleaner) + + testCases := []string{ + "pdf", + "html", + "txt", + "json", + "xml", + "unknown", + "123", + "markdown-invalid", + } + + for _, format := range testCases { + t.Run(format, func(t *testing.T) { + exporter, err := factory.CreateExporter(format) + + if err == nil { + t.Errorf("Expected error for unsupported format '%s', got nil", format) + } + + if exporter != nil { + t.Errorf("Expected nil exporter for unsupported format '%s', got %v", format, exporter) + } + + // Check error message contains the format + if err != nil && !strings.Contains(err.Error(), format) { + t.Errorf("Error message should contain the unsupported format '%s', got: %s", format, err.Error()) + } + + // Check error message has expected prefix + if err != nil && !strings.Contains(err.Error(), "unsupported export format") { + t.Errorf("Error message should contain 'unsupported export format', got: %s", err.Error()) + } + }) + } +} + +// TestFactory_GetSupportedFormats tests the GetSupportedFormats method. +func TestFactory_GetSupportedFormats(t *testing.T) { + htmlCleaner := services.NewHTMLCleaner() + factory := NewFactory(htmlCleaner) + + formats := factory.GetSupportedFormats() + + if formats == nil { + t.Fatal("GetSupportedFormats() returned nil") + } + + expected := []string{"markdown", "md", "docx", "word"} + + // Sort both slices for comparison + sort.Strings(formats) + sort.Strings(expected) + + if !reflect.DeepEqual(formats, expected) { + t.Errorf("Expected formats %v, got %v", expected, formats) + } + + // Verify all returned formats can create exporters + for _, format := range formats { + exporter, err := factory.CreateExporter(format) + if err != nil { + t.Errorf("Format '%s' from GetSupportedFormats() should be creatable, got error: %v", format, err) + } + if exporter == nil { + t.Errorf("Format '%s' from GetSupportedFormats() should create non-nil exporter", format) + } + } +} + +// TestFactory_GetSupportedFormats_Immutable tests that the returned slice is safe to modify. 
+func TestFactory_GetSupportedFormats_Immutable(t *testing.T) { + htmlCleaner := services.NewHTMLCleaner() + factory := NewFactory(htmlCleaner) + + // Get formats twice + formats1 := factory.GetSupportedFormats() + formats2 := factory.GetSupportedFormats() + + // Modify first slice + if len(formats1) > 0 { + formats1[0] = "modified" + } + + // Check that second call returns unmodified data + if len(formats2) > 0 && formats2[0] == "modified" { + t.Error("GetSupportedFormats() should return independent slices") + } + + // Verify original functionality still works + formats3 := factory.GetSupportedFormats() + if len(formats3) == 0 { + t.Error("GetSupportedFormats() should still return formats after modification") + } +} + +// TestFactory_ExporterTypes tests that created exporters are of correct types. +func TestFactory_ExporterTypes(t *testing.T) { + htmlCleaner := services.NewHTMLCleaner() + factory := NewFactory(htmlCleaner) + + // Test markdown exporter + markdownExporter, err := factory.CreateExporter("markdown") + if err != nil { + t.Fatalf("Failed to create markdown exporter: %v", err) + } + + if _, ok := markdownExporter.(*MarkdownExporter); !ok { + t.Error("Markdown exporter should be of type *MarkdownExporter") + } + + // Test docx exporter + docxExporter, err := factory.CreateExporter("docx") + if err != nil { + t.Fatalf("Failed to create docx exporter: %v", err) + } + + if _, ok := docxExporter.(*DocxExporter); !ok { + t.Error("DOCX exporter should be of type *DocxExporter") + } +} + +// TestFactory_HTMLCleanerPropagation tests that HTMLCleaner is properly passed to exporters. +func TestFactory_HTMLCleanerPropagation(t *testing.T) { + htmlCleaner := services.NewHTMLCleaner() + factory := NewFactory(htmlCleaner) + + // Test with markdown exporter + markdownExporter, err := factory.CreateExporter("markdown") + if err != nil { + t.Fatalf("Failed to create markdown exporter: %v", err) + } + + markdownImpl, ok := markdownExporter.(*MarkdownExporter) + if !ok { + t.Fatal("Failed to cast to MarkdownExporter") + } + + if markdownImpl.htmlCleaner == nil { + t.Error("HTMLCleaner should be propagated to MarkdownExporter") + } + + // Test with docx exporter + docxExporter, err := factory.CreateExporter("docx") + if err != nil { + t.Fatalf("Failed to create docx exporter: %v", err) + } + + docxImpl, ok := docxExporter.(*DocxExporter) + if !ok { + t.Fatal("Failed to cast to DocxExporter") + } + + if docxImpl.htmlCleaner == nil { + t.Error("HTMLCleaner should be propagated to DocxExporter") + } +} + +// TestFactory_MultipleExporterCreation tests creating multiple exporters of same type. 
+func TestFactory_MultipleExporterCreation(t *testing.T) { + htmlCleaner := services.NewHTMLCleaner() + factory := NewFactory(htmlCleaner) + + // Create multiple markdown exporters + exporter1, err := factory.CreateExporter("markdown") + if err != nil { + t.Fatalf("Failed to create first markdown exporter: %v", err) + } + + exporter2, err := factory.CreateExporter("md") + if err != nil { + t.Fatalf("Failed to create second markdown exporter: %v", err) + } + + // They should be different instances + if exporter1 == exporter2 { + t.Error("Factory should create independent exporter instances") + } + + // But both should be MarkdownExporter type + if _, ok := exporter1.(*MarkdownExporter); !ok { + t.Error("First exporter should be MarkdownExporter") + } + + if _, ok := exporter2.(*MarkdownExporter); !ok { + t.Error("Second exporter should be MarkdownExporter") + } +} + +// TestFactory_WithNilHTMLCleaner tests factory behavior with nil HTMLCleaner. +func TestFactory_WithNilHTMLCleaner(t *testing.T) { + // This tests edge case - should not panic but behavior may vary + defer func() { + if r := recover(); r != nil { + t.Errorf("Factory should handle nil HTMLCleaner gracefully, but panicked: %v", r) + } + }() + + factory := NewFactory(nil) + + if factory == nil { + t.Fatal("NewFactory(nil) returned nil") + } + + // Try to create an exporter - this might fail or succeed depending on implementation + _, err := factory.CreateExporter("markdown") + + // We don't assert on the error since nil HTMLCleaner handling is implementation-dependent + // The important thing is that it doesn't panic + _ = err +} + +// TestFactory_FormatNormalization tests that format strings are properly normalized. +func TestFactory_FormatNormalization(t *testing.T) { + htmlCleaner := services.NewHTMLCleaner() + factory := NewFactory(htmlCleaner) + + // Test formats with extra whitespace + testCases := []struct { + input string + expected string + }{ + {"markdown", "markdown"}, + {"MARKDOWN", "markdown"}, + {"Markdown", "markdown"}, + {"docx", "docx"}, + {"DOCX", "docx"}, + {"Docx", "docx"}, + } + + for _, tc := range testCases { + t.Run(tc.input, func(t *testing.T) { + exporter, err := factory.CreateExporter(tc.input) + if err != nil { + t.Fatalf("Failed to create exporter for '%s': %v", tc.input, err) + } + + format := exporter.GetSupportedFormat() + if format != tc.expected { + t.Errorf("Expected format '%s' for input '%s', got '%s'", tc.expected, tc.input, format) + } + }) + } +} + +// BenchmarkFactory_CreateExporter benchmarks the CreateExporter method. +func BenchmarkFactory_CreateExporter(b *testing.B) { + htmlCleaner := services.NewHTMLCleaner() + factory := NewFactory(htmlCleaner) + + b.ResetTimer() + for i := 0; i < b.N; i++ { + _, _ = factory.CreateExporter("markdown") + } +} + +// BenchmarkFactory_CreateExporter_Docx benchmarks creating DOCX exporters. +func BenchmarkFactory_CreateExporter_Docx(b *testing.B) { + htmlCleaner := services.NewHTMLCleaner() + factory := NewFactory(htmlCleaner) + + b.ResetTimer() + for i := 0; i < b.N; i++ { + _, _ = factory.CreateExporter("docx") + } +} + +// BenchmarkFactory_GetSupportedFormats benchmarks the GetSupportedFormats method. 
+func BenchmarkFactory_GetSupportedFormats(b *testing.B) { + htmlCleaner := services.NewHTMLCleaner() + factory := NewFactory(htmlCleaner) + + b.ResetTimer() + for i := 0; i < b.N; i++ { + _ = factory.GetSupportedFormats() + } +} diff --git a/internal/exporters/markdown.go b/internal/exporters/markdown.go index adb2b24..776b575 100644 --- a/internal/exporters/markdown.go +++ b/internal/exporters/markdown.go @@ -65,13 +65,15 @@ func (e *MarkdownExporter) Export(course *models.Course, outputPath string) erro buf.WriteString("\n---\n\n") // Process lessons - for i, lesson := range course.Course.Lessons { + lessonCounter := 0 + for _, lesson := range course.Course.Lessons { if lesson.Type == "section" { buf.WriteString(fmt.Sprintf("# %s\n\n", lesson.Title)) continue } - buf.WriteString(fmt.Sprintf("## Lesson %d: %s\n\n", i+1, lesson.Title)) + lessonCounter++ + buf.WriteString(fmt.Sprintf("## Lesson %d: %s\n\n", lessonCounter, lesson.Title)) if lesson.Description != "" { buf.WriteString(fmt.Sprintf("%s\n\n", e.htmlCleaner.CleanHTML(lesson.Description))) @@ -110,116 +112,178 @@ func (e *MarkdownExporter) processItemToMarkdown(buf *bytes.Buffer, item models. switch item.Type { case "text": - for _, subItem := range item.Items { - if subItem.Heading != "" { - heading := e.htmlCleaner.CleanHTML(subItem.Heading) - if heading != "" { - buf.WriteString(fmt.Sprintf("%s %s\n\n", headingPrefix, heading)) - } - } - if subItem.Paragraph != "" { - paragraph := e.htmlCleaner.CleanHTML(subItem.Paragraph) - if paragraph != "" { - buf.WriteString(fmt.Sprintf("%s\n\n", paragraph)) - } - } - } - + e.processTextItem(buf, item, headingPrefix) case "list": - for _, subItem := range item.Items { - if subItem.Paragraph != "" { - paragraph := e.htmlCleaner.CleanHTML(subItem.Paragraph) - if paragraph != "" { - buf.WriteString(fmt.Sprintf("- %s\n", paragraph)) - } - } - } - buf.WriteString("\n") - + e.processListItem(buf, item) case "multimedia": - buf.WriteString(fmt.Sprintf("%s Media Content\n\n", headingPrefix)) - for _, subItem := range item.Items { - if subItem.Media != nil { - if subItem.Media.Video != nil { - buf.WriteString(fmt.Sprintf("**Video**: %s\n", subItem.Media.Video.OriginalUrl)) - if subItem.Media.Video.Duration > 0 { - buf.WriteString(fmt.Sprintf("**Duration**: %d seconds\n", subItem.Media.Video.Duration)) - } - } - if subItem.Media.Image != nil { - buf.WriteString(fmt.Sprintf("**Image**: %s\n", subItem.Media.Image.OriginalUrl)) - } - } - if subItem.Caption != "" { - caption := e.htmlCleaner.CleanHTML(subItem.Caption) - buf.WriteString(fmt.Sprintf("*%s*\n", caption)) - } - } - buf.WriteString("\n") - + e.processMultimediaItem(buf, item, headingPrefix) case "image": - buf.WriteString(fmt.Sprintf("%s Image\n\n", headingPrefix)) - for _, subItem := range item.Items { - if subItem.Media != nil && subItem.Media.Image != nil { - buf.WriteString(fmt.Sprintf("**Image**: %s\n", subItem.Media.Image.OriginalUrl)) - } - if subItem.Caption != "" { - caption := e.htmlCleaner.CleanHTML(subItem.Caption) - buf.WriteString(fmt.Sprintf("*%s*\n", caption)) - } - } - buf.WriteString("\n") - + e.processImageItem(buf, item, headingPrefix) case "knowledgeCheck": - buf.WriteString(fmt.Sprintf("%s Knowledge Check\n\n", headingPrefix)) - for _, subItem := range item.Items { - if subItem.Title != "" { - title := e.htmlCleaner.CleanHTML(subItem.Title) - buf.WriteString(fmt.Sprintf("**Question**: %s\n\n", title)) - } - - buf.WriteString("**Answers**:\n") - for i, answer := range subItem.Answers { - correctMark := "" - if 
answer.Correct { - correctMark = " ✓" - } - buf.WriteString(fmt.Sprintf("%d. %s%s\n", i+1, answer.Title, correctMark)) - } - - if subItem.Feedback != "" { - feedback := e.htmlCleaner.CleanHTML(subItem.Feedback) - buf.WriteString(fmt.Sprintf("\n**Feedback**: %s\n", feedback)) - } - } - buf.WriteString("\n") - + e.processKnowledgeCheckItem(buf, item, headingPrefix) case "interactive": - buf.WriteString(fmt.Sprintf("%s Interactive Content\n\n", headingPrefix)) - for _, subItem := range item.Items { - if subItem.Title != "" { - title := e.htmlCleaner.CleanHTML(subItem.Title) - buf.WriteString(fmt.Sprintf("**%s**\n\n", title)) + e.processInteractiveItem(buf, item, headingPrefix) + case "divider": + e.processDividerItem(buf) + default: + e.processUnknownItem(buf, item, headingPrefix) + } +} + +// processTextItem handles text content with headings and paragraphs +func (e *MarkdownExporter) processTextItem(buf *bytes.Buffer, item models.Item, headingPrefix string) { + for _, subItem := range item.Items { + if subItem.Heading != "" { + heading := e.htmlCleaner.CleanHTML(subItem.Heading) + if heading != "" { + buf.WriteString(fmt.Sprintf("%s %s\n\n", headingPrefix, heading)) } } - - case "divider": - buf.WriteString("---\n\n") - - default: - // Handle unknown types - if len(item.Items) > 0 { - buf.WriteString(fmt.Sprintf("%s %s Content\n\n", headingPrefix, strings.Title(item.Type))) - for _, subItem := range item.Items { - if subItem.Title != "" { - title := e.htmlCleaner.CleanHTML(subItem.Title) - buf.WriteString(fmt.Sprintf("**%s**\n\n", title)) - } - if subItem.Paragraph != "" { - paragraph := e.htmlCleaner.CleanHTML(subItem.Paragraph) - buf.WriteString(fmt.Sprintf("%s\n\n", paragraph)) - } + if subItem.Paragraph != "" { + paragraph := e.htmlCleaner.CleanHTML(subItem.Paragraph) + if paragraph != "" { + buf.WriteString(fmt.Sprintf("%s\n\n", paragraph)) } } } } + +// processListItem handles list items with bullet points +func (e *MarkdownExporter) processListItem(buf *bytes.Buffer, item models.Item) { + for _, subItem := range item.Items { + if subItem.Paragraph != "" { + paragraph := e.htmlCleaner.CleanHTML(subItem.Paragraph) + if paragraph != "" { + buf.WriteString(fmt.Sprintf("- %s\n", paragraph)) + } + } + } + buf.WriteString("\n") +} + +// processMultimediaItem handles multimedia content including videos and images +func (e *MarkdownExporter) processMultimediaItem(buf *bytes.Buffer, item models.Item, headingPrefix string) { + buf.WriteString(fmt.Sprintf("%s Media Content\n\n", headingPrefix)) + for _, subItem := range item.Items { + e.processMediaSubItem(buf, subItem) + } + buf.WriteString("\n") +} + +// processMediaSubItem processes individual media items (video/image) +func (e *MarkdownExporter) processMediaSubItem(buf *bytes.Buffer, subItem models.SubItem) { + if subItem.Media != nil { + e.processVideoMedia(buf, subItem.Media) + e.processImageMedia(buf, subItem.Media) + } + if subItem.Caption != "" { + caption := e.htmlCleaner.CleanHTML(subItem.Caption) + buf.WriteString(fmt.Sprintf("*%s*\n", caption)) + } +} + +// processVideoMedia processes video media content +func (e *MarkdownExporter) processVideoMedia(buf *bytes.Buffer, media *models.Media) { + if media.Video != nil { + buf.WriteString(fmt.Sprintf("**Video**: %s\n", media.Video.OriginalUrl)) + if media.Video.Duration > 0 { + buf.WriteString(fmt.Sprintf("**Duration**: %d seconds\n", media.Video.Duration)) + } + } +} + +// processImageMedia processes image media content +func (e *MarkdownExporter) processImageMedia(buf *bytes.Buffer, 
media *models.Media) { + if media.Image != nil { + buf.WriteString(fmt.Sprintf("**Image**: %s\n", media.Image.OriginalUrl)) + } +} + +// processImageItem handles standalone image items +func (e *MarkdownExporter) processImageItem(buf *bytes.Buffer, item models.Item, headingPrefix string) { + buf.WriteString(fmt.Sprintf("%s Image\n\n", headingPrefix)) + for _, subItem := range item.Items { + if subItem.Media != nil && subItem.Media.Image != nil { + buf.WriteString(fmt.Sprintf("**Image**: %s\n", subItem.Media.Image.OriginalUrl)) + } + if subItem.Caption != "" { + caption := e.htmlCleaner.CleanHTML(subItem.Caption) + buf.WriteString(fmt.Sprintf("*%s*\n", caption)) + } + } + buf.WriteString("\n") +} + +// processKnowledgeCheckItem handles quiz questions and knowledge checks +func (e *MarkdownExporter) processKnowledgeCheckItem(buf *bytes.Buffer, item models.Item, headingPrefix string) { + buf.WriteString(fmt.Sprintf("%s Knowledge Check\n\n", headingPrefix)) + for _, subItem := range item.Items { + e.processQuestionSubItem(buf, subItem) + } + buf.WriteString("\n") +} + +// processQuestionSubItem processes individual question items +func (e *MarkdownExporter) processQuestionSubItem(buf *bytes.Buffer, subItem models.SubItem) { + if subItem.Title != "" { + title := e.htmlCleaner.CleanHTML(subItem.Title) + buf.WriteString(fmt.Sprintf("**Question**: %s\n\n", title)) + } + + e.processAnswers(buf, subItem.Answers) + + if subItem.Feedback != "" { + feedback := e.htmlCleaner.CleanHTML(subItem.Feedback) + buf.WriteString(fmt.Sprintf("\n**Feedback**: %s\n", feedback)) + } +} + +// processAnswers processes answer choices for quiz questions +func (e *MarkdownExporter) processAnswers(buf *bytes.Buffer, answers []models.Answer) { + buf.WriteString("**Answers**:\n") + for i, answer := range answers { + correctMark := "" + if answer.Correct { + correctMark = " ✓" + } + buf.WriteString(fmt.Sprintf("%d. 
%s%s\n", i+1, answer.Title, correctMark)) + } +} + +// processInteractiveItem handles interactive content +func (e *MarkdownExporter) processInteractiveItem(buf *bytes.Buffer, item models.Item, headingPrefix string) { + buf.WriteString(fmt.Sprintf("%s Interactive Content\n\n", headingPrefix)) + for _, subItem := range item.Items { + if subItem.Title != "" { + title := e.htmlCleaner.CleanHTML(subItem.Title) + buf.WriteString(fmt.Sprintf("**%s**\n\n", title)) + } + } +} + +// processDividerItem handles divider elements +func (e *MarkdownExporter) processDividerItem(buf *bytes.Buffer) { + buf.WriteString("---\n\n") +} + +// processUnknownItem handles unknown or unsupported item types +func (e *MarkdownExporter) processUnknownItem(buf *bytes.Buffer, item models.Item, headingPrefix string) { + if len(item.Items) > 0 { + buf.WriteString(fmt.Sprintf("%s %s Content\n\n", headingPrefix, strings.Title(item.Type))) + for _, subItem := range item.Items { + e.processGenericSubItem(buf, subItem) + } + } +} + +// processGenericSubItem processes sub-items for unknown types +func (e *MarkdownExporter) processGenericSubItem(buf *bytes.Buffer, subItem models.SubItem) { + if subItem.Title != "" { + title := e.htmlCleaner.CleanHTML(subItem.Title) + buf.WriteString(fmt.Sprintf("**%s**\n\n", title)) + } + if subItem.Paragraph != "" { + paragraph := e.htmlCleaner.CleanHTML(subItem.Paragraph) + buf.WriteString(fmt.Sprintf("%s\n\n", paragraph)) + } +} diff --git a/internal/exporters/markdown_test.go b/internal/exporters/markdown_test.go new file mode 100644 index 0000000..47e55ac --- /dev/null +++ b/internal/exporters/markdown_test.go @@ -0,0 +1,693 @@ +// Package exporters_test provides tests for the markdown exporter. +package exporters + +import ( + "bytes" + "os" + "path/filepath" + "strings" + "testing" + + "github.com/kjanat/articulate-parser/internal/models" + "github.com/kjanat/articulate-parser/internal/services" +) + +// TestNewMarkdownExporter tests the NewMarkdownExporter constructor. +func TestNewMarkdownExporter(t *testing.T) { + htmlCleaner := services.NewHTMLCleaner() + exporter := NewMarkdownExporter(htmlCleaner) + + if exporter == nil { + t.Fatal("NewMarkdownExporter() returned nil") + } + + // Type assertion to check internal structure + markdownExporter, ok := exporter.(*MarkdownExporter) + if !ok { + t.Fatal("NewMarkdownExporter() returned wrong type") + } + + if markdownExporter.htmlCleaner == nil { + t.Error("htmlCleaner should not be nil") + } +} + +// TestMarkdownExporter_GetSupportedFormat tests the GetSupportedFormat method. +func TestMarkdownExporter_GetSupportedFormat(t *testing.T) { + htmlCleaner := services.NewHTMLCleaner() + exporter := NewMarkdownExporter(htmlCleaner) + + expected := "markdown" + result := exporter.GetSupportedFormat() + + if result != expected { + t.Errorf("Expected format '%s', got '%s'", expected, result) + } +} + +// TestMarkdownExporter_Export tests the Export method. 
+func TestMarkdownExporter_Export(t *testing.T) { + htmlCleaner := services.NewHTMLCleaner() + exporter := NewMarkdownExporter(htmlCleaner) + + // Create test course + testCourse := createTestCourseForMarkdown() + + // Create temporary directory and file + tempDir := t.TempDir() + outputPath := filepath.Join(tempDir, "test-course.md") + + // Test successful export + err := exporter.Export(testCourse, outputPath) + if err != nil { + t.Fatalf("Export failed: %v", err) + } + + // Check that file was created + if _, err := os.Stat(outputPath); os.IsNotExist(err) { + t.Fatal("Output file was not created") + } + + // Read and verify content + content, err := os.ReadFile(outputPath) + if err != nil { + t.Fatalf("Failed to read output file: %v", err) + } + + contentStr := string(content) + + // Verify main course title + if !strings.Contains(contentStr, "# Test Course") { + t.Error("Output should contain course title as main heading") + } + + // Verify course information section + if !strings.Contains(contentStr, "## Course Information") { + t.Error("Output should contain course information section") + } + + // Verify course metadata + if !strings.Contains(contentStr, "- **Course ID**: test-course-id") { + t.Error("Output should contain course ID") + } + + if !strings.Contains(contentStr, "- **Share ID**: test-share-id") { + t.Error("Output should contain share ID") + } + + // Verify lesson content + if !strings.Contains(contentStr, "## Lesson 1: Test Lesson") { + t.Error("Output should contain lesson heading") + } + + // Verify section handling + if !strings.Contains(contentStr, "# Test Section") { + t.Error("Output should contain section as main heading") + } +} + +// TestMarkdownExporter_Export_InvalidPath tests export with invalid output path. +func TestMarkdownExporter_Export_InvalidPath(t *testing.T) { + htmlCleaner := services.NewHTMLCleaner() + exporter := NewMarkdownExporter(htmlCleaner) + + testCourse := createTestCourseForMarkdown() + + // Try to write to invalid path + invalidPath := "/invalid/path/that/does/not/exist/file.md" + err := exporter.Export(testCourse, invalidPath) + + if err == nil { + t.Fatal("Expected error for invalid path, got nil") + } +} + +// TestMarkdownExporter_ProcessTextItem tests the processTextItem method. +func TestMarkdownExporter_ProcessTextItem(t *testing.T) { + htmlCleaner := services.NewHTMLCleaner() + exporter := &MarkdownExporter{htmlCleaner: htmlCleaner} + + var buf bytes.Buffer + item := models.Item{ + Type: "text", + Items: []models.SubItem{ + { + Heading: "
<h2>Test Heading</h2>
", + Paragraph: "
<p>Test paragraph with <strong>bold</strong> text.</p>
", + }, + { + Paragraph: "
<p>Another paragraph.</p>
", + }, + }, + } + + exporter.processTextItem(&buf, item, "###") + + result := buf.String() + expected := "### Test Heading\n\nTest paragraph with bold text.\n\nAnother paragraph.\n\n" + + if result != expected { + t.Errorf("Expected:\n%q\nGot:\n%q", expected, result) + } +} + +// TestMarkdownExporter_ProcessListItem tests the processListItem method. +func TestMarkdownExporter_ProcessListItem(t *testing.T) { + htmlCleaner := services.NewHTMLCleaner() + exporter := &MarkdownExporter{htmlCleaner: htmlCleaner} + + var buf bytes.Buffer + item := models.Item{ + Type: "list", + Items: []models.SubItem{ + {Paragraph: "
<p>First item</p>
"}, + {Paragraph: "
<p>Second item with <em>emphasis</em></p>
"}, + {Paragraph: "
<p>Third item</p>
"}, + }, + } + + exporter.processListItem(&buf, item) + + result := buf.String() + expected := "- First item\n- Second item with emphasis\n- Third item\n\n" + + if result != expected { + t.Errorf("Expected:\n%q\nGot:\n%q", expected, result) + } +} + +// TestMarkdownExporter_ProcessMultimediaItem tests the processMultimediaItem method. +func TestMarkdownExporter_ProcessMultimediaItem(t *testing.T) { + htmlCleaner := services.NewHTMLCleaner() + exporter := &MarkdownExporter{htmlCleaner: htmlCleaner} + + var buf bytes.Buffer + item := models.Item{ + Type: "multimedia", + Items: []models.SubItem{ + { + Media: &models.Media{ + Video: &models.VideoMedia{ + OriginalUrl: "https://example.com/video.mp4", + Duration: 120, + }, + }, + Caption: "
<p>Video caption</p>
", + }, + }, + } + + exporter.processMultimediaItem(&buf, item, "###") + + result := buf.String() + + if !strings.Contains(result, "### Media Content") { + t.Error("Should contain media content heading") + } + if !strings.Contains(result, "**Video**: https://example.com/video.mp4") { + t.Error("Should contain video URL") + } + if !strings.Contains(result, "**Duration**: 120 seconds") { + t.Error("Should contain video duration") + } + if !strings.Contains(result, "*Video caption*") { + t.Error("Should contain video caption") + } +} + +// TestMarkdownExporter_ProcessImageItem tests the processImageItem method. +func TestMarkdownExporter_ProcessImageItem(t *testing.T) { + htmlCleaner := services.NewHTMLCleaner() + exporter := &MarkdownExporter{htmlCleaner: htmlCleaner} + + var buf bytes.Buffer + item := models.Item{ + Type: "image", + Items: []models.SubItem{ + { + Media: &models.Media{ + Image: &models.ImageMedia{ + OriginalUrl: "https://example.com/image.jpg", + }, + }, + Caption: "
<p>Image caption</p>
", + }, + }, + } + + exporter.processImageItem(&buf, item, "###") + + result := buf.String() + + if !strings.Contains(result, "### Image") { + t.Error("Should contain image heading") + } + if !strings.Contains(result, "**Image**: https://example.com/image.jpg") { + t.Error("Should contain image URL") + } + if !strings.Contains(result, "*Image caption*") { + t.Error("Should contain image caption") + } +} + +// TestMarkdownExporter_ProcessKnowledgeCheckItem tests the processKnowledgeCheckItem method. +func TestMarkdownExporter_ProcessKnowledgeCheckItem(t *testing.T) { + htmlCleaner := services.NewHTMLCleaner() + exporter := &MarkdownExporter{htmlCleaner: htmlCleaner} + + var buf bytes.Buffer + item := models.Item{ + Type: "knowledgeCheck", + Items: []models.SubItem{ + { + Title: "
<p>What is the capital of France?</p>
", + Answers: []models.Answer{ + {Title: "London", Correct: false}, + {Title: "Paris", Correct: true}, + {Title: "Berlin", Correct: false}, + }, + Feedback: "
<p>Paris is the capital of France.</p>
", + }, + }, + } + + exporter.processKnowledgeCheckItem(&buf, item, "###") + + result := buf.String() + + if !strings.Contains(result, "### Knowledge Check") { + t.Error("Should contain knowledge check heading") + } + if !strings.Contains(result, "**Question**: What is the capital of France?") { + t.Error("Should contain question") + } + if !strings.Contains(result, "**Answers**:") { + t.Error("Should contain answers heading") + } + if !strings.Contains(result, "2. Paris ✓") { + t.Error("Should mark correct answer") + } + if !strings.Contains(result, "**Feedback**: Paris is the capital of France.") { + t.Error("Should contain feedback") + } +} + +// TestMarkdownExporter_ProcessInteractiveItem tests the processInteractiveItem method. +func TestMarkdownExporter_ProcessInteractiveItem(t *testing.T) { + htmlCleaner := services.NewHTMLCleaner() + exporter := &MarkdownExporter{htmlCleaner: htmlCleaner} + + var buf bytes.Buffer + item := models.Item{ + Type: "interactive", + Items: []models.SubItem{ + {Title: "
<p>Interactive element title</p>
"}, + }, + } + + exporter.processInteractiveItem(&buf, item, "###") + + result := buf.String() + + if !strings.Contains(result, "### Interactive Content") { + t.Error("Should contain interactive content heading") + } + if !strings.Contains(result, "**Interactive element title**") { + t.Error("Should contain interactive element title") + } +} + +// TestMarkdownExporter_ProcessDividerItem tests the processDividerItem method. +func TestMarkdownExporter_ProcessDividerItem(t *testing.T) { + htmlCleaner := services.NewHTMLCleaner() + exporter := &MarkdownExporter{htmlCleaner: htmlCleaner} + + var buf bytes.Buffer + exporter.processDividerItem(&buf) + + result := buf.String() + expected := "---\n\n" + + if result != expected { + t.Errorf("Expected %q, got %q", expected, result) + } +} + +// TestMarkdownExporter_ProcessUnknownItem tests the processUnknownItem method. +func TestMarkdownExporter_ProcessUnknownItem(t *testing.T) { + htmlCleaner := services.NewHTMLCleaner() + exporter := &MarkdownExporter{htmlCleaner: htmlCleaner} + + var buf bytes.Buffer + item := models.Item{ + Type: "unknown", + Items: []models.SubItem{ + { + Title: "
<p>Unknown item title</p>
", + Paragraph: "
<p>Unknown item content</p>
", + }, + }, + } + + exporter.processUnknownItem(&buf, item, "###") + + result := buf.String() + + if !strings.Contains(result, "### Unknown Content") { + t.Error("Should contain unknown content heading") + } + if !strings.Contains(result, "**Unknown item title**") { + t.Error("Should contain unknown item title") + } + if !strings.Contains(result, "Unknown item content") { + t.Error("Should contain unknown item content") + } +} + +// TestMarkdownExporter_ProcessVideoMedia tests the processVideoMedia method. +func TestMarkdownExporter_ProcessVideoMedia(t *testing.T) { + htmlCleaner := services.NewHTMLCleaner() + exporter := &MarkdownExporter{htmlCleaner: htmlCleaner} + + var buf bytes.Buffer + media := &models.Media{ + Video: &models.VideoMedia{ + OriginalUrl: "https://example.com/video.mp4", + Duration: 300, + }, + } + + exporter.processVideoMedia(&buf, media) + + result := buf.String() + + if !strings.Contains(result, "**Video**: https://example.com/video.mp4") { + t.Error("Should contain video URL") + } + if !strings.Contains(result, "**Duration**: 300 seconds") { + t.Error("Should contain video duration") + } +} + +// TestMarkdownExporter_ProcessImageMedia tests the processImageMedia method. +func TestMarkdownExporter_ProcessImageMedia(t *testing.T) { + htmlCleaner := services.NewHTMLCleaner() + exporter := &MarkdownExporter{htmlCleaner: htmlCleaner} + + var buf bytes.Buffer + media := &models.Media{ + Image: &models.ImageMedia{ + OriginalUrl: "https://example.com/image.jpg", + }, + } + + exporter.processImageMedia(&buf, media) + + result := buf.String() + expected := "**Image**: https://example.com/image.jpg\n" + + if result != expected { + t.Errorf("Expected %q, got %q", expected, result) + } +} + +// TestMarkdownExporter_ProcessAnswers tests the processAnswers method. +func TestMarkdownExporter_ProcessAnswers(t *testing.T) { + htmlCleaner := services.NewHTMLCleaner() + exporter := &MarkdownExporter{htmlCleaner: htmlCleaner} + + var buf bytes.Buffer + answers := []models.Answer{ + {Title: "Answer 1", Correct: false}, + {Title: "Answer 2", Correct: true}, + {Title: "Answer 3", Correct: false}, + } + + exporter.processAnswers(&buf, answers) + + result := buf.String() + + if !strings.Contains(result, "**Answers**:") { + t.Error("Should contain answers heading") + } + if !strings.Contains(result, "1. Answer 1") { + t.Error("Should contain first answer") + } + if !strings.Contains(result, "2. Answer 2 ✓") { + t.Error("Should mark correct answer") + } + if !strings.Contains(result, "3. Answer 3") { + t.Error("Should contain third answer") + } +} + +// TestMarkdownExporter_ProcessItemToMarkdown_AllTypes tests all item types. 
+func TestMarkdownExporter_ProcessItemToMarkdown_AllTypes(t *testing.T) { + htmlCleaner := services.NewHTMLCleaner() + exporter := &MarkdownExporter{htmlCleaner: htmlCleaner} + + tests := []struct { + name string + itemType string + expectedText string + }{ + { + name: "text item", + itemType: "text", + expectedText: "", // processTextItem handles empty items + }, + { + name: "list item", + itemType: "list", + expectedText: "\n", // Empty list adds newline + }, + { + name: "multimedia item", + itemType: "multimedia", + expectedText: "### Media Content", + }, + { + name: "image item", + itemType: "image", + expectedText: "### Image", + }, + { + name: "knowledgeCheck item", + itemType: "knowledgeCheck", + expectedText: "### Knowledge Check", + }, + { + name: "interactive item", + itemType: "interactive", + expectedText: "### Interactive Content", + }, + { + name: "divider item", + itemType: "divider", + expectedText: "---", + }, + { + name: "unknown item", + itemType: "unknown", + expectedText: "", // Empty unknown items don't add content + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + var buf bytes.Buffer + item := models.Item{Type: tt.itemType} + + exporter.processItemToMarkdown(&buf, item, 3) + + result := buf.String() + if tt.expectedText != "" && !strings.Contains(result, tt.expectedText) { + t.Errorf("Expected result to contain %q, got %q", tt.expectedText, result) + } + }) + } +} + +// TestMarkdownExporter_ComplexCourse tests export of a complex course structure. +func TestMarkdownExporter_ComplexCourse(t *testing.T) { + htmlCleaner := services.NewHTMLCleaner() + exporter := NewMarkdownExporter(htmlCleaner) + + // Create complex test course + course := &models.Course{ + ShareID: "complex-test-id", + Author: "Test Author", + Course: models.CourseInfo{ + ID: "complex-course", + Title: "Complex Test Course", + Description: "
<p>This is a complex course description.</p>
", + NavigationMode: "menu", + ExportSettings: &models.ExportSettings{ + Format: "scorm", + }, + Lessons: []models.Lesson{ + { + ID: "section-1", + Title: "Course Section", + Type: "section", + }, + { + ID: "lesson-1", + Title: "Introduction Lesson", + Type: "lesson", + Description: "
<p>Introduction to the course</p>
", + Items: []models.Item{ + { + Type: "text", + Items: []models.SubItem{ + { + Heading: "
<h2>Welcome</h2>
", + Paragraph: "
<p>Welcome to our course!</p>
", + }, + }, + }, + { + Type: "list", + Items: []models.SubItem{ + {Paragraph: "
<p>First objective</p>
"}, + {Paragraph: "
<p>Second objective</p>
"}, + }, + }, + { + Type: "knowledgeCheck", + Items: []models.SubItem{ + { + Title: "
<p>What will you learn?</p>
", + Answers: []models.Answer{ + {Title: "Nothing", Correct: false}, + {Title: "Everything", Correct: true}, + }, + Feedback: "
<p>Great choice!</p>
", + }, + }, + }, + }, + }, + }, + }, + } + + // Create temporary output file + tempDir := t.TempDir() + outputPath := filepath.Join(tempDir, "complex-course.md") + + // Export course + err := exporter.Export(course, outputPath) + if err != nil { + t.Fatalf("Export failed: %v", err) + } + + // Read and verify content + content, err := os.ReadFile(outputPath) + if err != nil { + t.Fatalf("Failed to read output file: %v", err) + } + + contentStr := string(content) + + // Verify various elements are present + checks := []string{ + "# Complex Test Course", + "This is a complex course description.", + "- **Export Format**: scorm", + "# Course Section", + "## Lesson 1: Introduction Lesson", + "Introduction to the course", + "### Welcome", + "Welcome to our course!", + "- First objective", + "- Second objective", + "### Knowledge Check", + "**Question**: What will you learn?", + "2. Everything ✓", + "**Feedback**: Great choice!", + } + + for _, check := range checks { + if !strings.Contains(contentStr, check) { + t.Errorf("Output should contain: %q", check) + } + } +} + +// createTestCourseForMarkdown creates a test course for markdown export testing. +func createTestCourseForMarkdown() *models.Course { + return &models.Course{ + ShareID: "test-share-id", + Author: "Test Author", + Course: models.CourseInfo{ + ID: "test-course-id", + Title: "Test Course", + Description: "Test course description", + NavigationMode: "menu", + Lessons: []models.Lesson{ + { + ID: "section-1", + Title: "Test Section", + Type: "section", + }, + { + ID: "lesson-1", + Title: "Test Lesson", + Type: "lesson", + Items: []models.Item{ + { + Type: "text", + Items: []models.SubItem{ + { + Heading: "Test Heading", + Paragraph: "Test paragraph content", + }, + }, + }, + }, + }, + }, + }, + } +} + +// BenchmarkMarkdownExporter_Export benchmarks the Export method. +func BenchmarkMarkdownExporter_Export(b *testing.B) { + htmlCleaner := services.NewHTMLCleaner() + exporter := NewMarkdownExporter(htmlCleaner) + course := createTestCourseForMarkdown() + + // Create temporary directory + tempDir := b.TempDir() + + b.ResetTimer() + for i := 0; i < b.N; i++ { + outputPath := filepath.Join(tempDir, "benchmark-course.md") + _ = exporter.Export(course, outputPath) + // Clean up for next iteration + os.Remove(outputPath) + } +} + +// BenchmarkMarkdownExporter_ProcessTextItem benchmarks the processTextItem method. +func BenchmarkMarkdownExporter_ProcessTextItem(b *testing.B) { + htmlCleaner := services.NewHTMLCleaner() + exporter := &MarkdownExporter{htmlCleaner: htmlCleaner} + + item := models.Item{ + Type: "text", + Items: []models.SubItem{ + { + Heading: "
<h2>Benchmark Heading</h2>
", + Paragraph: "
<p>Benchmark paragraph with <strong>bold</strong> text.</p>
", + }, + }, + } + + b.ResetTimer() + for i := 0; i < b.N; i++ { + var buf bytes.Buffer + exporter.processTextItem(&buf, item, "###") + } +} diff --git a/internal/models/models_test.go b/internal/models/models_test.go new file mode 100644 index 0000000..9bbd344 --- /dev/null +++ b/internal/models/models_test.go @@ -0,0 +1,790 @@ +// Package models_test provides tests for the data models. +package models + +import ( + "encoding/json" + "reflect" + "testing" +) + +// TestCourse_JSONMarshalUnmarshal tests JSON marshaling and unmarshaling of Course. +func TestCourse_JSONMarshalUnmarshal(t *testing.T) { + original := Course{ + ShareID: "test-share-id", + Author: "Test Author", + Course: CourseInfo{ + ID: "course-123", + Title: "Test Course", + Description: "A test course description", + Color: "#FF5733", + NavigationMode: "menu", + Lessons: []Lesson{ + { + ID: "lesson-1", + Title: "First Lesson", + Description: "Lesson description", + Type: "lesson", + Icon: "icon-1", + Ready: true, + CreatedAt: "2023-01-01T00:00:00Z", + UpdatedAt: "2023-01-02T00:00:00Z", + }, + }, + ExportSettings: &ExportSettings{ + Title: "Export Title", + Format: "scorm", + }, + }, + LabelSet: LabelSet{ + ID: "labelset-1", + Name: "Test Labels", + }, + } + + // Marshal to JSON + jsonData, err := json.Marshal(original) + if err != nil { + t.Fatalf("Failed to marshal Course to JSON: %v", err) + } + + // Unmarshal from JSON + var unmarshaled Course + err = json.Unmarshal(jsonData, &unmarshaled) + if err != nil { + t.Fatalf("Failed to unmarshal Course from JSON: %v", err) + } + + // Compare structures + if !reflect.DeepEqual(original, unmarshaled) { + t.Errorf("Marshaled and unmarshaled Course structs do not match") + t.Logf("Original: %+v", original) + t.Logf("Unmarshaled: %+v", unmarshaled) + } +} + +// TestCourseInfo_JSONMarshalUnmarshal tests JSON marshaling and unmarshaling of CourseInfo. +func TestCourseInfo_JSONMarshalUnmarshal(t *testing.T) { + original := CourseInfo{ + ID: "course-456", + Title: "Another Test Course", + Description: "Another test description", + Color: "#33FF57", + NavigationMode: "linear", + Lessons: []Lesson{ + { + ID: "lesson-2", + Title: "Second Lesson", + Type: "section", + Items: []Item{ + { + ID: "item-1", + Type: "text", + Family: "text", + Variant: "paragraph", + Items: []SubItem{ + { + Title: "Sub Item Title", + Heading: "Sub Item Heading", + Paragraph: "Sub item paragraph content", + }, + }, + }, + }, + }, + }, + CoverImage: &Media{ + Image: &ImageMedia{ + Key: "img-123", + Type: "jpg", + Width: 800, + Height: 600, + OriginalUrl: "https://example.com/image.jpg", + }, + }, + } + + // Marshal to JSON + jsonData, err := json.Marshal(original) + if err != nil { + t.Fatalf("Failed to marshal CourseInfo to JSON: %v", err) + } + + // Unmarshal from JSON + var unmarshaled CourseInfo + err = json.Unmarshal(jsonData, &unmarshaled) + if err != nil { + t.Fatalf("Failed to unmarshal CourseInfo from JSON: %v", err) + } + + // Compare structures + if !reflect.DeepEqual(original, unmarshaled) { + t.Errorf("Marshaled and unmarshaled CourseInfo structs do not match") + } +} + +// TestLesson_JSONMarshalUnmarshal tests JSON marshaling and unmarshaling of Lesson. 
+func TestLesson_JSONMarshalUnmarshal(t *testing.T) { + original := Lesson{ + ID: "lesson-test", + Title: "Test Lesson", + Description: "Test lesson description", + Type: "lesson", + Icon: "lesson-icon", + Ready: true, + CreatedAt: "2023-06-01T12:00:00Z", + UpdatedAt: "2023-06-01T13:00:00Z", + Position: map[string]interface{}{"x": 1, "y": 2}, + Items: []Item{ + { + ID: "item-test", + Type: "multimedia", + Family: "media", + Variant: "video", + Items: []SubItem{ + { + Caption: "Video caption", + Media: &Media{ + Video: &VideoMedia{ + Key: "video-123", + URL: "https://example.com/video.mp4", + Type: "mp4", + Duration: 120, + OriginalUrl: "https://example.com/video.mp4", + }, + }, + }, + }, + Settings: map[string]interface{}{"autoplay": false}, + Data: map[string]interface{}{"metadata": "test"}, + }, + }, + } + + // Marshal to JSON + jsonData, err := json.Marshal(original) + if err != nil { + t.Fatalf("Failed to marshal Lesson to JSON: %v", err) + } + + // Unmarshal from JSON + var unmarshaled Lesson + err = json.Unmarshal(jsonData, &unmarshaled) + if err != nil { + t.Fatalf("Failed to unmarshal Lesson from JSON: %v", err) + } + + // Compare structures + if !compareLessons(original, unmarshaled) { + t.Errorf("Marshaled and unmarshaled Lesson structs do not match") + } +} + +// TestItem_JSONMarshalUnmarshal tests JSON marshaling and unmarshaling of Item. +func TestItem_JSONMarshalUnmarshal(t *testing.T) { + original := Item{ + ID: "item-json-test", + Type: "knowledgeCheck", + Family: "assessment", + Variant: "multipleChoice", + Items: []SubItem{ + { + Title: "What is the answer?", + Answers: []Answer{ + {Title: "Option A", Correct: false}, + {Title: "Option B", Correct: true}, + {Title: "Option C", Correct: false}, + }, + Feedback: "Well done!", + }, + }, + Settings: map[string]interface{}{ + "allowRetry": true, + "showAnswer": true, + }, + Data: map[string]interface{}{ + "points": 10, + "weight": 1.5, + }, + } + + // Marshal to JSON + jsonData, err := json.Marshal(original) + if err != nil { + t.Fatalf("Failed to marshal Item to JSON: %v", err) + } + + // Unmarshal from JSON + var unmarshaled Item + err = json.Unmarshal(jsonData, &unmarshaled) + if err != nil { + t.Fatalf("Failed to unmarshal Item from JSON: %v", err) + } + + // Compare structures + if !compareItem(original, unmarshaled) { + t.Errorf("Marshaled and unmarshaled Item structs do not match") + } +} + +// TestSubItem_JSONMarshalUnmarshal tests JSON marshaling and unmarshaling of SubItem. 
+func TestSubItem_JSONMarshalUnmarshal(t *testing.T) { + original := SubItem{ + Title: "Test SubItem Title", + Heading: "Test SubItem Heading", + Paragraph: "Test paragraph with content", + Caption: "Test caption", + Feedback: "Test feedback message", + Answers: []Answer{ + {Title: "First answer", Correct: true}, + {Title: "Second answer", Correct: false}, + }, + Media: &Media{ + Image: &ImageMedia{ + Key: "subitem-img", + Type: "png", + Width: 400, + Height: 300, + OriginalUrl: "https://example.com/subitem.png", + CrushedKey: "crushed-123", + UseCrushedKey: true, + }, + }, + } + + // Marshal to JSON + jsonData, err := json.Marshal(original) + if err != nil { + t.Fatalf("Failed to marshal SubItem to JSON: %v", err) + } + + // Unmarshal from JSON + var unmarshaled SubItem + err = json.Unmarshal(jsonData, &unmarshaled) + if err != nil { + t.Fatalf("Failed to unmarshal SubItem from JSON: %v", err) + } + + // Compare structures + if !reflect.DeepEqual(original, unmarshaled) { + t.Errorf("Marshaled and unmarshaled SubItem structs do not match") + } +} + +// TestAnswer_JSONMarshalUnmarshal tests JSON marshaling and unmarshaling of Answer. +func TestAnswer_JSONMarshalUnmarshal(t *testing.T) { + original := Answer{ + Title: "Test answer text", + Correct: true, + } + + // Marshal to JSON + jsonData, err := json.Marshal(original) + if err != nil { + t.Fatalf("Failed to marshal Answer to JSON: %v", err) + } + + // Unmarshal from JSON + var unmarshaled Answer + err = json.Unmarshal(jsonData, &unmarshaled) + if err != nil { + t.Fatalf("Failed to unmarshal Answer from JSON: %v", err) + } + + // Compare structures + if !reflect.DeepEqual(original, unmarshaled) { + t.Errorf("Marshaled and unmarshaled Answer structs do not match") + } +} + +// TestMedia_JSONMarshalUnmarshal tests JSON marshaling and unmarshaling of Media. +func TestMedia_JSONMarshalUnmarshal(t *testing.T) { + // Test with Image + originalImage := Media{ + Image: &ImageMedia{ + Key: "media-img-test", + Type: "jpeg", + Width: 1200, + Height: 800, + OriginalUrl: "https://example.com/media.jpg", + CrushedKey: "crushed-media", + UseCrushedKey: false, + }, + } + + jsonData, err := json.Marshal(originalImage) + if err != nil { + t.Fatalf("Failed to marshal Media with Image to JSON: %v", err) + } + + var unmarshaledImage Media + err = json.Unmarshal(jsonData, &unmarshaledImage) + if err != nil { + t.Fatalf("Failed to unmarshal Media with Image from JSON: %v", err) + } + + if !reflect.DeepEqual(originalImage, unmarshaledImage) { + t.Errorf("Marshaled and unmarshaled Media with Image do not match") + } + + // Test with Video + originalVideo := Media{ + Video: &VideoMedia{ + Key: "media-video-test", + URL: "https://example.com/media.mp4", + Type: "mp4", + Duration: 300, + Poster: "https://example.com/poster.jpg", + Thumbnail: "https://example.com/thumb.jpg", + InputKey: "input-123", + OriginalUrl: "https://example.com/original.mp4", + }, + } + + jsonData, err = json.Marshal(originalVideo) + if err != nil { + t.Fatalf("Failed to marshal Media with Video to JSON: %v", err) + } + + var unmarshaledVideo Media + err = json.Unmarshal(jsonData, &unmarshaledVideo) + if err != nil { + t.Fatalf("Failed to unmarshal Media with Video from JSON: %v", err) + } + + if !reflect.DeepEqual(originalVideo, unmarshaledVideo) { + t.Errorf("Marshaled and unmarshaled Media with Video do not match") + } +} + +// TestImageMedia_JSONMarshalUnmarshal tests JSON marshaling and unmarshaling of ImageMedia. 
+func TestImageMedia_JSONMarshalUnmarshal(t *testing.T) { + original := ImageMedia{ + Key: "image-media-test", + Type: "gif", + Width: 640, + Height: 480, + OriginalUrl: "https://example.com/image.gif", + CrushedKey: "crushed-gif", + UseCrushedKey: true, + } + + // Marshal to JSON + jsonData, err := json.Marshal(original) + if err != nil { + t.Fatalf("Failed to marshal ImageMedia to JSON: %v", err) + } + + // Unmarshal from JSON + var unmarshaled ImageMedia + err = json.Unmarshal(jsonData, &unmarshaled) + if err != nil { + t.Fatalf("Failed to unmarshal ImageMedia from JSON: %v", err) + } + + // Compare structures + if !reflect.DeepEqual(original, unmarshaled) { + t.Errorf("Marshaled and unmarshaled ImageMedia structs do not match") + } +} + +// TestVideoMedia_JSONMarshalUnmarshal tests JSON marshaling and unmarshaling of VideoMedia. +func TestVideoMedia_JSONMarshalUnmarshal(t *testing.T) { + original := VideoMedia{ + Key: "video-media-test", + URL: "https://example.com/video.webm", + Type: "webm", + Duration: 450, + Poster: "https://example.com/poster.jpg", + Thumbnail: "https://example.com/thumbnail.jpg", + InputKey: "upload-456", + OriginalUrl: "https://example.com/original.webm", + } + + // Marshal to JSON + jsonData, err := json.Marshal(original) + if err != nil { + t.Fatalf("Failed to marshal VideoMedia to JSON: %v", err) + } + + // Unmarshal from JSON + var unmarshaled VideoMedia + err = json.Unmarshal(jsonData, &unmarshaled) + if err != nil { + t.Fatalf("Failed to unmarshal VideoMedia from JSON: %v", err) + } + + // Compare structures + if !reflect.DeepEqual(original, unmarshaled) { + t.Errorf("Marshaled and unmarshaled VideoMedia structs do not match") + } +} + +// TestExportSettings_JSONMarshalUnmarshal tests JSON marshaling and unmarshaling of ExportSettings. +func TestExportSettings_JSONMarshalUnmarshal(t *testing.T) { + original := ExportSettings{ + Title: "Custom Export Title", + Format: "xAPI", + } + + // Marshal to JSON + jsonData, err := json.Marshal(original) + if err != nil { + t.Fatalf("Failed to marshal ExportSettings to JSON: %v", err) + } + + // Unmarshal from JSON + var unmarshaled ExportSettings + err = json.Unmarshal(jsonData, &unmarshaled) + if err != nil { + t.Fatalf("Failed to unmarshal ExportSettings from JSON: %v", err) + } + + // Compare structures + if !reflect.DeepEqual(original, unmarshaled) { + t.Errorf("Marshaled and unmarshaled ExportSettings structs do not match") + } +} + +// TestLabelSet_JSONMarshalUnmarshal tests JSON marshaling and unmarshaling of LabelSet. +func TestLabelSet_JSONMarshalUnmarshal(t *testing.T) { + original := LabelSet{ + ID: "labelset-test", + Name: "Test Label Set", + } + + // Marshal to JSON + jsonData, err := json.Marshal(original) + if err != nil { + t.Fatalf("Failed to marshal LabelSet to JSON: %v", err) + } + + // Unmarshal from JSON + var unmarshaled LabelSet + err = json.Unmarshal(jsonData, &unmarshaled) + if err != nil { + t.Fatalf("Failed to unmarshal LabelSet from JSON: %v", err) + } + + // Compare structures + if !reflect.DeepEqual(original, unmarshaled) { + t.Errorf("Marshaled and unmarshaled LabelSet structs do not match") + } +} + +// TestEmptyStructures tests marshaling and unmarshaling of empty structures. 
+func TestEmptyStructures(t *testing.T) { + testCases := []struct { + name string + data interface{} + }{ + {"Empty Course", Course{}}, + {"Empty CourseInfo", CourseInfo{}}, + {"Empty Lesson", Lesson{}}, + {"Empty Item", Item{}}, + {"Empty SubItem", SubItem{}}, + {"Empty Answer", Answer{}}, + {"Empty Media", Media{}}, + {"Empty ImageMedia", ImageMedia{}}, + {"Empty VideoMedia", VideoMedia{}}, + {"Empty ExportSettings", ExportSettings{}}, + {"Empty LabelSet", LabelSet{}}, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + // Marshal to JSON + jsonData, err := json.Marshal(tc.data) + if err != nil { + t.Fatalf("Failed to marshal %s to JSON: %v", tc.name, err) + } + + // Unmarshal from JSON + result := reflect.New(reflect.TypeOf(tc.data)).Interface() + err = json.Unmarshal(jsonData, result) + if err != nil { + t.Fatalf("Failed to unmarshal %s from JSON: %v", tc.name, err) + } + + // Basic validation that no errors occurred + if len(jsonData) == 0 { + t.Errorf("%s should produce some JSON output", tc.name) + } + }) + } +} + +// TestNilPointerSafety tests that nil pointers in optional fields are handled correctly. +func TestNilPointerSafety(t *testing.T) { + course := Course{ + ShareID: "nil-test", + Course: CourseInfo{ + ID: "nil-course", + Title: "Nil Pointer Test", + CoverImage: nil, // Test nil pointer + ExportSettings: nil, // Test nil pointer + Lessons: []Lesson{ + { + ID: "lesson-nil", + Title: "Lesson with nil media", + Items: []Item{ + { + ID: "item-nil", + Type: "text", + Items: []SubItem{ + { + Title: "SubItem with nil media", + Media: nil, // Test nil pointer + }, + }, + Media: nil, // Test nil pointer + }, + }, + }, + }, + }, + } + + // Marshal to JSON + jsonData, err := json.Marshal(course) + if err != nil { + t.Fatalf("Failed to marshal Course with nil pointers to JSON: %v", err) + } + + // Unmarshal from JSON + var unmarshaled Course + err = json.Unmarshal(jsonData, &unmarshaled) + if err != nil { + t.Fatalf("Failed to unmarshal Course with nil pointers from JSON: %v", err) + } + + // Basic validation + if unmarshaled.ShareID != "nil-test" { + t.Error("ShareID should be preserved") + } + if unmarshaled.Course.Title != "Nil Pointer Test" { + t.Error("Course title should be preserved") + } +} + +// TestJSONTagsPresence tests that JSON tags are properly defined. +func TestJSONTagsPresence(t *testing.T) { + // Test that important fields have JSON tags + courseType := reflect.TypeOf(Course{}) + if courseType.Kind() == reflect.Struct { + field, found := courseType.FieldByName("ShareID") + if !found { + t.Error("ShareID field not found") + } else { + tag := field.Tag.Get("json") + if tag == "" { + t.Error("ShareID should have json tag") + } + if tag != "shareId" { + t.Errorf("ShareID json tag should be 'shareId', got '%s'", tag) + } + } + } + + // Test CourseInfo + courseInfoType := reflect.TypeOf(CourseInfo{}) + if courseInfoType.Kind() == reflect.Struct { + field, found := courseInfoType.FieldByName("NavigationMode") + if !found { + t.Error("NavigationMode field not found") + } else { + tag := field.Tag.Get("json") + if tag == "" { + t.Error("NavigationMode should have json tag") + } + } + } +} + +// BenchmarkCourse_JSONMarshal benchmarks JSON marshaling of Course. 
+func BenchmarkCourse_JSONMarshal(b *testing.B) { + course := Course{ + ShareID: "benchmark-id", + Author: "Benchmark Author", + Course: CourseInfo{ + ID: "benchmark-course", + Title: "Benchmark Course", + Lessons: []Lesson{ + { + ID: "lesson-1", + Title: "Lesson 1", + Items: []Item{ + { + ID: "item-1", + Type: "text", + Items: []SubItem{ + {Title: "SubItem 1"}, + }, + }, + }, + }, + }, + }, + } + + b.ResetTimer() + for i := 0; i < b.N; i++ { + _, _ = json.Marshal(course) + } +} + +// BenchmarkCourse_JSONUnmarshal benchmarks JSON unmarshaling of Course. +func BenchmarkCourse_JSONUnmarshal(b *testing.B) { + course := Course{ + ShareID: "benchmark-id", + Author: "Benchmark Author", + Course: CourseInfo{ + ID: "benchmark-course", + Title: "Benchmark Course", + Lessons: []Lesson{ + { + ID: "lesson-1", + Title: "Lesson 1", + Items: []Item{ + { + ID: "item-1", + Type: "text", + Items: []SubItem{ + {Title: "SubItem 1"}, + }, + }, + }, + }, + }, + }, + } + + jsonData, _ := json.Marshal(course) + + b.ResetTimer() + for i := 0; i < b.N; i++ { + var result Course + _ = json.Unmarshal(jsonData, &result) + } +} + +// compareMaps compares two interface{} values that should be maps +func compareMaps(original, unmarshaled interface{}) bool { + origMap, origOk := original.(map[string]interface{}) + unMap, unOk := unmarshaled.(map[string]interface{}) + + if !origOk || !unOk { + // If not maps, use deep equal + return reflect.DeepEqual(original, unmarshaled) + } + + if len(origMap) != len(unMap) { + return false + } + + for key, origVal := range origMap { + unVal, exists := unMap[key] + if !exists { + return false + } + + // Handle numeric type conversion from JSON + switch origVal := origVal.(type) { + case int: + if unFloat, ok := unVal.(float64); ok { + if float64(origVal) != unFloat { + return false + } + } else { + return false + } + case float64: + if unFloat, ok := unVal.(float64); ok { + if origVal != unFloat { + return false + } + } else { + return false + } + default: + if !reflect.DeepEqual(origVal, unVal) { + return false + } + } + } + return true +} + +// compareLessons compares two Lesson structs accounting for JSON type conversion +func compareLessons(original, unmarshaled Lesson) bool { + // Compare all fields except Position and Items + if original.ID != unmarshaled.ID || + original.Title != unmarshaled.Title || + original.Description != unmarshaled.Description || + original.Type != unmarshaled.Type || + original.Icon != unmarshaled.Icon || + original.Ready != unmarshaled.Ready || + original.CreatedAt != unmarshaled.CreatedAt || + original.UpdatedAt != unmarshaled.UpdatedAt { + return false + } + + // Compare Position + if !compareMaps(original.Position, unmarshaled.Position) { + return false + } + + // Compare Items + return compareItems(original.Items, unmarshaled.Items) +} + +// compareItems compares two Item slices accounting for JSON type conversion +func compareItems(original, unmarshaled []Item) bool { + if len(original) != len(unmarshaled) { + return false + } + + for i := range original { + if !compareItem(original[i], unmarshaled[i]) { + return false + } + } + return true +} + +// compareItem compares two Item structs accounting for JSON type conversion +func compareItem(original, unmarshaled Item) bool { + // Compare basic fields + if original.ID != unmarshaled.ID || + original.Type != unmarshaled.Type || + original.Family != unmarshaled.Family || + original.Variant != unmarshaled.Variant { + return false + } + + // Compare Settings and Data + if !compareMaps(original.Settings, 
unmarshaled.Settings) { + return false + } + + if !compareMaps(original.Data, unmarshaled.Data) { + return false + } + + // Compare Items (SubItems) + if len(original.Items) != len(unmarshaled.Items) { + return false + } + + for i := range original.Items { + if !reflect.DeepEqual(original.Items[i], unmarshaled.Items[i]) { + return false + } + } + + // Compare Media + if !reflect.DeepEqual(original.Media, unmarshaled.Media) { + return false + } + + return true +} diff --git a/internal/services/app_test.go b/internal/services/app_test.go new file mode 100644 index 0000000..9e1a4a4 --- /dev/null +++ b/internal/services/app_test.go @@ -0,0 +1,353 @@ +// Package services_test provides tests for the services package. +package services + +import ( + "errors" + "testing" + + "github.com/kjanat/articulate-parser/internal/interfaces" + "github.com/kjanat/articulate-parser/internal/models" +) + +// MockCourseParser is a mock implementation of interfaces.CourseParser for testing. +type MockCourseParser struct { + mockFetchCourse func(uri string) (*models.Course, error) + mockLoadCourseFromFile func(filePath string) (*models.Course, error) +} + +func (m *MockCourseParser) FetchCourse(uri string) (*models.Course, error) { + if m.mockFetchCourse != nil { + return m.mockFetchCourse(uri) + } + return nil, errors.New("not implemented") +} + +func (m *MockCourseParser) LoadCourseFromFile(filePath string) (*models.Course, error) { + if m.mockLoadCourseFromFile != nil { + return m.mockLoadCourseFromFile(filePath) + } + return nil, errors.New("not implemented") +} + +// MockExporter is a mock implementation of interfaces.Exporter for testing. +type MockExporter struct { + mockExport func(course *models.Course, outputPath string) error + mockGetSupportedFormat func() string +} + +func (m *MockExporter) Export(course *models.Course, outputPath string) error { + if m.mockExport != nil { + return m.mockExport(course, outputPath) + } + return nil +} + +func (m *MockExporter) GetSupportedFormat() string { + if m.mockGetSupportedFormat != nil { + return m.mockGetSupportedFormat() + } + return "mock" +} + +// MockExporterFactory is a mock implementation of interfaces.ExporterFactory for testing. +type MockExporterFactory struct { + mockCreateExporter func(format string) (*MockExporter, error) + mockGetSupportedFormats func() []string +} + +func (m *MockExporterFactory) CreateExporter(format string) (interfaces.Exporter, error) { + if m.mockCreateExporter != nil { + exporter, err := m.mockCreateExporter(format) + return exporter, err + } + return &MockExporter{}, nil +} + +func (m *MockExporterFactory) GetSupportedFormats() []string { + if m.mockGetSupportedFormats != nil { + return m.mockGetSupportedFormats() + } + return []string{"mock"} +} + +// createTestCourse creates a sample course for testing purposes. +func createTestCourse() *models.Course { + return &models.Course{ + ShareID: "test-share-id", + Author: "Test Author", + Course: models.CourseInfo{ + ID: "test-course-id", + Title: "Test Course", + Description: "This is a test course", + Lessons: []models.Lesson{ + { + ID: "lesson-1", + Title: "Test Lesson", + Type: "lesson", + Items: []models.Item{ + { + ID: "item-1", + Type: "text", + Items: []models.SubItem{ + { + ID: "subitem-1", + Title: "Test Title", + Paragraph: "Test paragraph content", + }, + }, + }, + }, + }, + }, + }, + } +} + +// TestNewApp tests the NewApp constructor. 
+func TestNewApp(t *testing.T) { + parser := &MockCourseParser{} + factory := &MockExporterFactory{} + + app := NewApp(parser, factory) + + if app == nil { + t.Fatal("NewApp() returned nil") + } + + if app.parser != parser { + t.Error("App parser was not set correctly") + } + + // Test that the factory is set (we can't directly compare interface values) + formats := app.GetSupportedFormats() + if len(formats) == 0 { + t.Error("App exporterFactory was not set correctly - no supported formats") + } +} + +// TestApp_ProcessCourseFromFile tests the ProcessCourseFromFile method. +func TestApp_ProcessCourseFromFile(t *testing.T) { + testCourse := createTestCourse() + + tests := []struct { + name string + filePath string + format string + outputPath string + setupMocks func(*MockCourseParser, *MockExporterFactory, *MockExporter) + expectedError string + }{ + { + name: "successful processing", + filePath: "test.json", + format: "markdown", + outputPath: "output.md", + setupMocks: func(parser *MockCourseParser, factory *MockExporterFactory, exporter *MockExporter) { + parser.mockLoadCourseFromFile = func(filePath string) (*models.Course, error) { + if filePath != "test.json" { + t.Errorf("Expected filePath 'test.json', got '%s'", filePath) + } + return testCourse, nil + } + + factory.mockCreateExporter = func(format string) (*MockExporter, error) { + if format != "markdown" { + t.Errorf("Expected format 'markdown', got '%s'", format) + } + return exporter, nil + } + + exporter.mockExport = func(course *models.Course, outputPath string) error { + if outputPath != "output.md" { + t.Errorf("Expected outputPath 'output.md', got '%s'", outputPath) + } + if course != testCourse { + t.Error("Expected course to match testCourse") + } + return nil + } + }, + }, + { + name: "file loading error", + filePath: "nonexistent.json", + format: "markdown", + outputPath: "output.md", + setupMocks: func(parser *MockCourseParser, factory *MockExporterFactory, exporter *MockExporter) { + parser.mockLoadCourseFromFile = func(filePath string) (*models.Course, error) { + return nil, errors.New("file not found") + } + }, + expectedError: "failed to load course from file", + }, + { + name: "exporter creation error", + filePath: "test.json", + format: "unsupported", + outputPath: "output.txt", + setupMocks: func(parser *MockCourseParser, factory *MockExporterFactory, exporter *MockExporter) { + parser.mockLoadCourseFromFile = func(filePath string) (*models.Course, error) { + return testCourse, nil + } + + factory.mockCreateExporter = func(format string) (*MockExporter, error) { + return nil, errors.New("unsupported format") + } + }, + expectedError: "failed to create exporter", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + parser := &MockCourseParser{} + exporter := &MockExporter{} + factory := &MockExporterFactory{} + + tt.setupMocks(parser, factory, exporter) + + app := NewApp(parser, factory) + err := app.ProcessCourseFromFile(tt.filePath, tt.format, tt.outputPath) + + if tt.expectedError != "" { + if err == nil { + t.Fatalf("Expected error containing '%s', got nil", tt.expectedError) + } + if !contains(err.Error(), tt.expectedError) { + t.Errorf("Expected error containing '%s', got '%s'", tt.expectedError, err.Error()) + } + } else { + if err != nil { + t.Errorf("Expected no error, got: %v", err) + } + } + }) + } +} + +// TestApp_ProcessCourseFromURI tests the ProcessCourseFromURI method. 
+func TestApp_ProcessCourseFromURI(t *testing.T) { + testCourse := createTestCourse() + + tests := []struct { + name string + uri string + format string + outputPath string + setupMocks func(*MockCourseParser, *MockExporterFactory, *MockExporter) + expectedError string + }{ + { + name: "successful processing", + uri: "https://rise.articulate.com/share/test123", + format: "docx", + outputPath: "output.docx", + setupMocks: func(parser *MockCourseParser, factory *MockExporterFactory, exporter *MockExporter) { + parser.mockFetchCourse = func(uri string) (*models.Course, error) { + if uri != "https://rise.articulate.com/share/test123" { + t.Errorf("Expected uri 'https://rise.articulate.com/share/test123', got '%s'", uri) + } + return testCourse, nil + } + + factory.mockCreateExporter = func(format string) (*MockExporter, error) { + if format != "docx" { + t.Errorf("Expected format 'docx', got '%s'", format) + } + return exporter, nil + } + + exporter.mockExport = func(course *models.Course, outputPath string) error { + if outputPath != "output.docx" { + t.Errorf("Expected outputPath 'output.docx', got '%s'", outputPath) + } + return nil + } + }, + }, + { + name: "fetch error", + uri: "invalid-uri", + format: "docx", + outputPath: "output.docx", + setupMocks: func(parser *MockCourseParser, factory *MockExporterFactory, exporter *MockExporter) { + parser.mockFetchCourse = func(uri string) (*models.Course, error) { + return nil, errors.New("network error") + } + }, + expectedError: "failed to fetch course", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + parser := &MockCourseParser{} + exporter := &MockExporter{} + factory := &MockExporterFactory{} + + tt.setupMocks(parser, factory, exporter) + + app := NewApp(parser, factory) + err := app.ProcessCourseFromURI(tt.uri, tt.format, tt.outputPath) + + if tt.expectedError != "" { + if err == nil { + t.Fatalf("Expected error containing '%s', got nil", tt.expectedError) + } + if !contains(err.Error(), tt.expectedError) { + t.Errorf("Expected error containing '%s', got '%s'", tt.expectedError, err.Error()) + } + } else { + if err != nil { + t.Errorf("Expected no error, got: %v", err) + } + } + }) + } +} + +// TestApp_GetSupportedFormats tests the GetSupportedFormats method. +func TestApp_GetSupportedFormats(t *testing.T) { + expectedFormats := []string{"markdown", "docx", "pdf"} + + parser := &MockCourseParser{} + factory := &MockExporterFactory{ + mockGetSupportedFormats: func() []string { + return expectedFormats + }, + } + + app := NewApp(parser, factory) + formats := app.GetSupportedFormats() + + if len(formats) != len(expectedFormats) { + t.Errorf("Expected %d formats, got %d", len(expectedFormats), len(formats)) + } + + for i, format := range formats { + if format != expectedFormats[i] { + t.Errorf("Expected format '%s' at index %d, got '%s'", expectedFormats[i], i, format) + } + } +} + +// contains checks if a string contains a substring. +func contains(s, substr string) bool { + return len(s) >= len(substr) && + (len(substr) == 0 || + s == substr || + (len(s) > len(substr) && + (s[:len(substr)] == substr || + s[len(s)-len(substr):] == substr || + containsSubstring(s, substr)))) +} + +// containsSubstring checks if s contains substr as a substring. 
+func containsSubstring(s, substr string) bool { + for i := 0; i <= len(s)-len(substr); i++ { + if s[i:i+len(substr)] == substr { + return true + } + } + return false +} diff --git a/internal/services/html_cleaner_test.go b/internal/services/html_cleaner_test.go new file mode 100644 index 0000000..d3f9252 --- /dev/null +++ b/internal/services/html_cleaner_test.go @@ -0,0 +1,325 @@ +// Package services_test provides tests for the HTML cleaner service. +package services + +import ( + "strings" + "testing" +) + +// TestNewHTMLCleaner tests the NewHTMLCleaner constructor. +func TestNewHTMLCleaner(t *testing.T) { + cleaner := NewHTMLCleaner() + + if cleaner == nil { + t.Fatal("NewHTMLCleaner() returned nil") + } +} + +// TestHTMLCleaner_CleanHTML tests the CleanHTML method with various HTML inputs. +func TestHTMLCleaner_CleanHTML(t *testing.T) { + cleaner := NewHTMLCleaner() + + tests := []struct { + name string + input string + expected string + }{ + { + name: "plain text (no HTML)", + input: "This is plain text", + expected: "This is plain text", + }, + { + name: "empty string", + input: "", + expected: "", + }, + { + name: "simple HTML tag", + input: "
<p>Hello world</p>
", + expected: "Hello world", + }, + { + name: "multiple HTML tags", + input: "
<h1>Title</h1><p>Paragraph text</p>
", + expected: "TitleParagraph text", + }, + { + name: "nested HTML tags", + input: "
<div><h1>Title</h1><p>Paragraph with <b>bold</b> text</p></div>
", + expected: "TitleParagraph with bold text", + }, + { + name: "HTML with attributes", + input: "
<p class='intro' id='main'>Text with attributes</p>
", + expected: "Text with attributes", + }, + { + name: "self-closing tags", + input: "Line 1
<br/>Line 2<hr/>
End", + expected: "Line 1Line 2End", + }, + { + name: "HTML entities - basic", + input: "AT&T <company> "quoted"   text", + expected: "AT&T \"quoted\" text", + }, + { + name: "HTML entities - apostrophe", + input: "It's a test", + expected: "It's a test", + }, + { + name: "HTML entities - special characters", + input: "ïber ëlite écarté", + expected: "ïber ëlite écarté", + }, + { + name: "HTML entities - nbsp", + input: "Word1   Word2", + expected: "Word1 Word2", + }, + { + name: "mixed HTML and entities", + input: "
<p>Hello &amp; welcome to our <b>site</b>!</p>
", + expected: "Hello & welcome to our site!", + }, + { + name: "multiple whitespace", + input: "Text with\t\tmultiple\n\nspaces", + expected: "Text with multiple spaces", + }, + { + name: "whitespace with HTML", + input: "
<p>  Text with    spaces  </p>
", + expected: "Text with spaces", + }, + { + name: "complex content", + input: "

Course Title

This is a great course about & HTML entities like   and "quotes".

", + expected: "Course TitleThis is a great course about & HTML entities like and \"quotes\".", + }, + { + name: "malformed HTML", + input: "

Unclosed paragraph

Another tag

", + expected: "Unclosed paragraphAnother tag", + }, + { + name: "HTML comments (should be removed)", + input: "Text beforeText after", + expected: "Text beforeText after", + }, + { + name: "script and style tags content", + input: "Content", + expected: "alert('test');Contentbody{color:red;}", + }, + { + name: "line breaks and formatting", + input: "

+			expected: "Line 1 Line 2 Line 3",
+		},
+		{
+			name:     "only whitespace",
+			input:    " \t\n ",
+			expected: "",
+		},
+		{
+			name:     "only HTML tags",
+			input:    "<div><p></p><br/></div>",
+			expected: "",
+		},
+		{
+			name:     "HTML with newlines",
+			input:    "<p>\n Paragraph with\n line breaks\n</p>",
+			expected: "Paragraph with line breaks",
+		},
+		{
+			name:     "complex nested structure",
+			input:    "<article><h1>Title</h1><section><p>First paragraph with <a href=\"https://example.com\">link</a>.</p><ul><li>Item 1</li><li>Item 2</li></ul></section></article>",
+			expected: "TitleFirst paragraph with link.Item 1Item 2",
+		},
+		{
+			name:     "entities in attributes (should still be processed)",
+			input:    "<div title=\"Tom &amp; Jerry\">Content</div>",
+			expected: "Content",
+		},
+		{
+			name:     "special HTML5 entities",
+			input:    "Left arrow &larr; Right arrow &rarr;",
+			expected: "Left arrow &larr; Right arrow &rarr;", // These are not handled by the cleaner
+		},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			result := cleaner.CleanHTML(tt.input)
+			if result != tt.expected {
+				t.Errorf("CleanHTML(%q) = %q, want %q", tt.input, result, tt.expected)
+			}
+		})
+	}
+}
+
+// TestHTMLCleaner_CleanHTML_LargeContent tests the CleanHTML method with large content.
+func TestHTMLCleaner_CleanHTML_LargeContent(t *testing.T) {
+	cleaner := NewHTMLCleaner()
+
+	// Create a large HTML string
+	var builder strings.Builder
+	builder.WriteString("<html><body>")
+	for i := 0; i < 1000; i++ {
+		builder.WriteString("<p>Paragraph ")
+		builder.WriteString(string(rune('0' + i%10)))
+		builder.WriteString(" with some content &amp; entities.</p>")
+	}
+	builder.WriteString("</body></html>")
+
+	input := builder.String()
+	result := cleaner.CleanHTML(input)
+
+	// Check that HTML tags are removed
+	if strings.Contains(result, "<") || strings.Contains(result, ">") {
+		t.Error("Result should not contain HTML tags")
+	}
+
+	// Check that content is preserved
+	if !strings.Contains(result, "Paragraph") {
+		t.Error("Result should contain paragraph content")
+	}
+
+	// Check that entities are converted
+	if strings.Contains(result, "&amp;") {
+		t.Error("Result should not contain unconverted HTML entities")
+	}
+	if !strings.Contains(result, "&") {
+		t.Error("Result should contain converted ampersand")
+	}
+}
+
+// TestHTMLCleaner_CleanHTML_EdgeCases tests edge cases for the CleanHTML method.
+func TestHTMLCleaner_CleanHTML_EdgeCases(t *testing.T) {
+	cleaner := NewHTMLCleaner()
+
+	tests := []struct {
+		name     string
+		input    string
+		expected string
+	}{
+		{
+			name:     "only entities",
+			input:    "&amp;&lt;&gt;&quot;&apos;&nbsp;",
+			expected: "&<>\"'",
+		},
+		{
+			name:     "repeated entities",
+			input:    "&amp;&amp;&amp;",
+			expected: "&&&",
+		},
+		{
+			name:     "entities without semicolon (should not be converted)",
+			input:    "&amp test &lt test",
+			expected: "&amp test &lt test",
+		},
+		{
+			name:     "mixed valid and invalid entities",
+			input:    "&amp; &invalid; &lt; &fake;",
+			expected: "& &invalid; < &fake;",
+		},
+		{
+			name:     "unclosed tag at end",
+			input:    "Content <p>with unclosed",
+			expected: "Content with unclosed",
+		},
+		{
+			name:     "tag with no closing bracket",
+			input:    "Content <div",
+			expected: "Content",
+		},
+		{
+			name:     "deeply nested tags",
+			input:    "<div><div><p>Deep content</p></div></div>",
+			expected: "Deep content",
+		},
+		{
+			name:     "empty tags with whitespace",
+			input:    "<div><p> \t\n </p></div>",
+			expected: "",
+		},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			result := cleaner.CleanHTML(tt.input)
+			if result != tt.expected {
+				t.Errorf("CleanHTML(%q) = %q, want %q", tt.input, result, tt.expected)
+			}
+		})
+	}
+}
+
+// TestHTMLCleaner_CleanHTML_Unicode tests Unicode content handling.
+func TestHTMLCleaner_CleanHTML_Unicode(t *testing.T) {
+	cleaner := NewHTMLCleaner()
+
+	tests := []struct {
+		name     string
+		input    string
+		expected string
+	}{
+		{
+			name:     "unicode characters",
+			input:    "<p>Hello 世界! Café naïve résumé</p>",
+			expected: "Hello 世界! Café naïve résumé",
+		},
+		{
+			name:     "unicode with entities",
+			input:    "<p>Unicode: 你好 &amp; emoji: 🌍</p>",
+			expected: "Unicode: 你好 & emoji: 🌍",
+		},
+		{
+			name:     "mixed scripts",
+			input:    "<div>English العربية русский 日本語</div>",
+			expected: "English العربية русский 日本語",
+		},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			result := cleaner.CleanHTML(tt.input)
+			if result != tt.expected {
+				t.Errorf("CleanHTML(%q) = %q, want %q", tt.input, result, tt.expected)
+			}
+		})
+	}
+}
+
+// BenchmarkHTMLCleaner_CleanHTML benchmarks the CleanHTML method.
+func BenchmarkHTMLCleaner_CleanHTML(b *testing.B) {
+	cleaner := NewHTMLCleaner()
+	input := "<div><h2>Course Title</h2><p>This is a great course about &amp; HTML entities like &nbsp; and &quot;quotes&quot;.</p><ul><li>Item 1</li><li>Item 2</li></ul></div>"
+
+	b.ResetTimer()
+	for i := 0; i < b.N; i++ {
+		cleaner.CleanHTML(input)
+	}
+}
+
+// BenchmarkHTMLCleaner_CleanHTML_Large benchmarks the CleanHTML method with large content.
+func BenchmarkHTMLCleaner_CleanHTML_Large(b *testing.B) {
+	cleaner := NewHTMLCleaner()
+
+	// Create a large HTML string
+	var builder strings.Builder
+	for i := 0; i < 100; i++ {
+		builder.WriteString("<p>Paragraph ")
+		builder.WriteString(string(rune('0' + i%10)))
+		builder.WriteString(" with some content &amp; entities &lt;test&gt;.</p>
") + } + input := builder.String() + + b.ResetTimer() + for i := 0; i < b.N; i++ { + cleaner.CleanHTML(input) + } +} diff --git a/internal/services/parser.go b/internal/services/parser.go index ae5a797..6dd60b4 100644 --- a/internal/services/parser.go +++ b/internal/services/parser.go @@ -7,6 +7,7 @@ import ( "fmt" "io" "net/http" + "net/url" "os" "regexp" "time" @@ -112,6 +113,17 @@ func (p *ArticulateParser) LoadCourseFromFile(filePath string) (*models.Course, // - The share ID string if found // - An error if the share ID can't be extracted from the URI func (p *ArticulateParser) extractShareID(uri string) (string, error) { + // Parse the URL to validate the domain + parsedURL, err := url.Parse(uri) + if err != nil { + return "", fmt.Errorf("invalid URI: %s", uri) + } + + // Validate that it's an Articulate Rise domain + if parsedURL.Host != "rise.articulate.com" { + return "", fmt.Errorf("invalid domain for Articulate Rise URI: %s", parsedURL.Host) + } + re := regexp.MustCompile(`/share/([a-zA-Z0-9_-]+)`) matches := re.FindStringSubmatch(uri) if len(matches) < 2 { diff --git a/internal/services/parser_test.go b/internal/services/parser_test.go new file mode 100644 index 0000000..98a0814 --- /dev/null +++ b/internal/services/parser_test.go @@ -0,0 +1,440 @@ +// Package services_test provides tests for the parser service. +package services + +import ( + "encoding/json" + "net/http" + "net/http/httptest" + "os" + "path/filepath" + "strings" + "testing" + "time" + + "github.com/kjanat/articulate-parser/internal/models" +) + +// TestNewArticulateParser tests the NewArticulateParser constructor. +func TestNewArticulateParser(t *testing.T) { + parser := NewArticulateParser() + + if parser == nil { + t.Fatal("NewArticulateParser() returned nil") + } + + // Type assertion to check internal structure + articulateParser, ok := parser.(*ArticulateParser) + if !ok { + t.Fatal("NewArticulateParser() returned wrong type") + } + + expectedBaseURL := "https://rise.articulate.com" + if articulateParser.BaseURL != expectedBaseURL { + t.Errorf("Expected BaseURL '%s', got '%s'", expectedBaseURL, articulateParser.BaseURL) + } + + if articulateParser.Client == nil { + t.Error("Client should not be nil") + } + + expectedTimeout := 30 * time.Second + if articulateParser.Client.Timeout != expectedTimeout { + t.Errorf("Expected timeout %v, got %v", expectedTimeout, articulateParser.Client.Timeout) + } +} + +// TestArticulateParser_FetchCourse tests the FetchCourse method. 
+func TestArticulateParser_FetchCourse(t *testing.T) { + // Create a test course object + testCourse := &models.Course{ + ShareID: "test-share-id", + Author: "Test Author", + Course: models.CourseInfo{ + ID: "test-course-id", + Title: "Test Course", + Description: "Test Description", + }, + } + + // Create test server + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + // Check request path + expectedPath := "/api/rise-runtime/boot/share/test-share-id" + if r.URL.Path != expectedPath { + t.Errorf("Expected path '%s', got '%s'", expectedPath, r.URL.Path) + } + + // Check request method + if r.Method != http.MethodGet { + t.Errorf("Expected method GET, got %s", r.Method) + } + + // Return mock response + w.Header().Set("Content-Type", "application/json") + if err := json.NewEncoder(w).Encode(testCourse); err != nil { + t.Fatalf("Failed to encode test course: %v", err) + } + })) + defer server.Close() + + // Create parser with test server URL + parser := &ArticulateParser{ + BaseURL: server.URL, + Client: &http.Client{ + Timeout: 5 * time.Second, + }, + } + + tests := []struct { + name string + uri string + expectedError string + }{ + { + name: "valid articulate rise URI", + uri: "https://rise.articulate.com/share/test-share-id#/", + }, + { + name: "valid articulate rise URI without fragment", + uri: "https://rise.articulate.com/share/test-share-id", + }, + { + name: "invalid URI format", + uri: "invalid-uri", + expectedError: "invalid domain for Articulate Rise URI:", + }, + { + name: "empty URI", + uri: "", + expectedError: "invalid domain for Articulate Rise URI:", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + course, err := parser.FetchCourse(tt.uri) + + if tt.expectedError != "" { + if err == nil { + t.Fatalf("Expected error containing '%s', got nil", tt.expectedError) + } + if !strings.Contains(err.Error(), tt.expectedError) { + t.Errorf("Expected error containing '%s', got '%s'", tt.expectedError, err.Error()) + } + } else { + if err != nil { + t.Fatalf("Expected no error, got: %v", err) + } + if course == nil { + t.Fatal("Expected course, got nil") + } + if course.ShareID != testCourse.ShareID { + t.Errorf("Expected ShareID '%s', got '%s'", testCourse.ShareID, course.ShareID) + } + } + }) + } +} + +// TestArticulateParser_FetchCourse_NetworkError tests network error handling. +func TestArticulateParser_FetchCourse_NetworkError(t *testing.T) { + // Create parser with invalid URL to simulate network error + parser := &ArticulateParser{ + BaseURL: "http://localhost:99999", // Invalid port + Client: &http.Client{ + Timeout: 1 * time.Millisecond, // Very short timeout + }, + } + + _, err := parser.FetchCourse("https://rise.articulate.com/share/test-share-id") + if err == nil { + t.Fatal("Expected network error, got nil") + } + + if !strings.Contains(err.Error(), "failed to fetch course data") { + t.Errorf("Expected error to contain 'failed to fetch course data', got '%s'", err.Error()) + } +} + +// TestArticulateParser_FetchCourse_InvalidJSON tests invalid JSON response handling. 
+func TestArticulateParser_FetchCourse_InvalidJSON(t *testing.T) { + // Create test server that returns invalid JSON + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "application/json") + w.Write([]byte("invalid json")) + })) + defer server.Close() + + parser := &ArticulateParser{ + BaseURL: server.URL, + Client: &http.Client{ + Timeout: 5 * time.Second, + }, + } + + _, err := parser.FetchCourse("https://rise.articulate.com/share/test-share-id") + if err == nil { + t.Fatal("Expected JSON parsing error, got nil") + } + + if !strings.Contains(err.Error(), "failed to unmarshal JSON") { + t.Errorf("Expected error to contain 'failed to unmarshal JSON', got '%s'", err.Error()) + } +} + +// TestArticulateParser_LoadCourseFromFile tests the LoadCourseFromFile method. +func TestArticulateParser_LoadCourseFromFile(t *testing.T) { + // Create a temporary test file + testCourse := &models.Course{ + ShareID: "file-test-share-id", + Author: "File Test Author", + Course: models.CourseInfo{ + ID: "file-test-course-id", + Title: "File Test Course", + Description: "File Test Description", + }, + } + + // Create temporary directory and file + tempDir := t.TempDir() + tempFile := filepath.Join(tempDir, "test-course.json") + + // Write test data to file + data, err := json.Marshal(testCourse) + if err != nil { + t.Fatalf("Failed to marshal test course: %v", err) + } + + if err := os.WriteFile(tempFile, data, 0644); err != nil { + t.Fatalf("Failed to write test file: %v", err) + } + + parser := NewArticulateParser() + + tests := []struct { + name string + filePath string + expectedError string + }{ + { + name: "valid file", + filePath: tempFile, + }, + { + name: "nonexistent file", + filePath: filepath.Join(tempDir, "nonexistent.json"), + expectedError: "failed to read file", + }, + { + name: "empty path", + filePath: "", + expectedError: "failed to read file", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + course, err := parser.LoadCourseFromFile(tt.filePath) + + if tt.expectedError != "" { + if err == nil { + t.Fatalf("Expected error containing '%s', got nil", tt.expectedError) + } + if !strings.Contains(err.Error(), tt.expectedError) { + t.Errorf("Expected error containing '%s', got '%s'", tt.expectedError, err.Error()) + } + } else { + if err != nil { + t.Fatalf("Expected no error, got: %v", err) + } + if course == nil { + t.Fatal("Expected course, got nil") + } + if course.ShareID != testCourse.ShareID { + t.Errorf("Expected ShareID '%s', got '%s'", testCourse.ShareID, course.ShareID) + } + } + }) + } +} + +// TestArticulateParser_LoadCourseFromFile_InvalidJSON tests invalid JSON file handling. +func TestArticulateParser_LoadCourseFromFile_InvalidJSON(t *testing.T) { + // Create temporary file with invalid JSON + tempDir := t.TempDir() + tempFile := filepath.Join(tempDir, "invalid.json") + + if err := os.WriteFile(tempFile, []byte("invalid json content"), 0644); err != nil { + t.Fatalf("Failed to write test file: %v", err) + } + + parser := NewArticulateParser() + _, err := parser.LoadCourseFromFile(tempFile) + + if err == nil { + t.Fatal("Expected JSON parsing error, got nil") + } + + if !strings.Contains(err.Error(), "failed to unmarshal JSON") { + t.Errorf("Expected error to contain 'failed to unmarshal JSON', got '%s'", err.Error()) + } +} + +// TestExtractShareID tests the extractShareID method. 
+func TestExtractShareID(t *testing.T) { + parser := &ArticulateParser{} + + tests := []struct { + name string + uri string + expected string + hasError bool + }{ + { + name: "standard articulate rise URI with fragment", + uri: "https://rise.articulate.com/share/N_APNg40Vr2CSH2xNz-ZLATM5kNviDIO#/", + expected: "N_APNg40Vr2CSH2xNz-ZLATM5kNviDIO", + }, + { + name: "standard articulate rise URI without fragment", + uri: "https://rise.articulate.com/share/N_APNg40Vr2CSH2xNz-ZLATM5kNviDIO", + expected: "N_APNg40Vr2CSH2xNz-ZLATM5kNviDIO", + }, + { + name: "URI with trailing slash", + uri: "https://rise.articulate.com/share/N_APNg40Vr2CSH2xNz-ZLATM5kNviDIO/", + expected: "N_APNg40Vr2CSH2xNz-ZLATM5kNviDIO", + }, + { + name: "short share ID", + uri: "https://rise.articulate.com/share/abc123", + expected: "abc123", + }, + { + name: "share ID with hyphens and underscores", + uri: "https://rise.articulate.com/share/test_ID-123_abc", + expected: "test_ID-123_abc", + }, + { + name: "invalid URI - no share path", + uri: "https://rise.articulate.com/", + hasError: true, + }, + { + name: "invalid URI - wrong domain", + uri: "https://example.com/share/test123", + hasError: true, + }, + { + name: "invalid URI - no share ID", + uri: "https://rise.articulate.com/share/", + hasError: true, + }, + { + name: "empty URI", + uri: "", + hasError: true, + }, + { + name: "malformed URI", + uri: "not-a-uri", + hasError: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result, err := parser.extractShareID(tt.uri) + + if tt.hasError { + if err == nil { + t.Fatalf("Expected error for URI '%s', got nil", tt.uri) + } + } else { + if err != nil { + t.Fatalf("Expected no error for URI '%s', got: %v", tt.uri, err) + } + if result != tt.expected { + t.Errorf("Expected share ID '%s', got '%s'", tt.expected, result) + } + } + }) + } +} + +// TestBuildAPIURL tests the buildAPIURL method. +func TestBuildAPIURL(t *testing.T) { + parser := &ArticulateParser{ + BaseURL: "https://rise.articulate.com", + } + + tests := []struct { + name string + shareID string + expected string + }{ + { + name: "standard share ID", + shareID: "N_APNg40Vr2CSH2xNz-ZLATM5kNviDIO", + expected: "https://rise.articulate.com/api/rise-runtime/boot/share/N_APNg40Vr2CSH2xNz-ZLATM5kNviDIO", + }, + { + name: "short share ID", + shareID: "abc123", + expected: "https://rise.articulate.com/api/rise-runtime/boot/share/abc123", + }, + { + name: "share ID with special characters", + shareID: "test_ID-123_abc", + expected: "https://rise.articulate.com/api/rise-runtime/boot/share/test_ID-123_abc", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := parser.buildAPIURL(tt.shareID) + if result != tt.expected { + t.Errorf("Expected URL '%s', got '%s'", tt.expected, result) + } + }) + } +} + +// TestBuildAPIURL_DifferentBaseURL tests buildAPIURL with different base URLs. +func TestBuildAPIURL_DifferentBaseURL(t *testing.T) { + parser := &ArticulateParser{ + BaseURL: "https://custom.domain.com", + } + + shareID := "test123" + expected := "https://custom.domain.com/api/rise-runtime/boot/share/test123" + result := parser.buildAPIURL(shareID) + + if result != expected { + t.Errorf("Expected URL '%s', got '%s'", expected, result) + } +} + +// BenchmarkExtractShareID benchmarks the extractShareID method. 
+func BenchmarkExtractShareID(b *testing.B) { + parser := &ArticulateParser{} + uri := "https://rise.articulate.com/share/N_APNg40Vr2CSH2xNz-ZLATM5kNviDIO#/" + + b.ResetTimer() + for i := 0; i < b.N; i++ { + _, _ = parser.extractShareID(uri) + } +} + +// BenchmarkBuildAPIURL benchmarks the buildAPIURL method. +func BenchmarkBuildAPIURL(b *testing.B) { + parser := &ArticulateParser{ + BaseURL: "https://rise.articulate.com", + } + shareID := "N_APNg40Vr2CSH2xNz-ZLATM5kNviDIO" + + b.ResetTimer() + for i := 0; i < b.N; i++ { + _ = parser.buildAPIURL(shareID) + } +} diff --git a/internal/version/version.go b/internal/version/version.go index d523ac3..9a073f4 100644 --- a/internal/version/version.go +++ b/internal/version/version.go @@ -5,7 +5,7 @@ package version // Version information. var ( // Version is the current version of the application. - Version = "0.1.1" + Version = "0.2.0" // BuildTime is the time the binary was built. BuildTime = "unknown" diff --git a/main_test.go b/main_test.go new file mode 100644 index 0000000..c982980 --- /dev/null +++ b/main_test.go @@ -0,0 +1,175 @@ +// Package main_test provides tests for the main package utility functions. +package main + +import ( + "testing" +) + +// TestIsURI tests the isURI function with various input scenarios. +func TestIsURI(t *testing.T) { + tests := []struct { + name string + input string + expected bool + }{ + { + name: "valid HTTP URI", + input: "http://example.com", + expected: true, + }, + { + name: "valid HTTPS URI", + input: "https://example.com", + expected: true, + }, + { + name: "valid Articulate Rise URI", + input: "https://rise.articulate.com/share/N_APNg40Vr2CSH2xNz-ZLATM5kNviDIO#/", + expected: true, + }, + { + name: "local file path", + input: "C:\\Users\\test\\file.json", + expected: false, + }, + { + name: "relative file path", + input: "./sample.json", + expected: false, + }, + { + name: "filename only", + input: "sample.json", + expected: false, + }, + { + name: "empty string", + input: "", + expected: false, + }, + { + name: "short string", + input: "http", + expected: false, + }, + { + name: "malformed URI", + input: "htp://example.com", + expected: false, + }, + { + name: "FTP URI", + input: "ftp://example.com", + expected: false, + }, + { + name: "HTTP with extra characters", + input: "xhttp://example.com", + expected: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := isURI(tt.input) + if result != tt.expected { + t.Errorf("isURI(%q) = %v, want %v", tt.input, result, tt.expected) + } + }) + } +} + +// TestJoinStrings tests the joinStrings function with various input scenarios. 
+func TestJoinStrings(t *testing.T) { + tests := []struct { + name string + strs []string + separator string + expected string + }{ + { + name: "empty slice", + strs: []string{}, + separator: ", ", + expected: "", + }, + { + name: "single string", + strs: []string{"hello"}, + separator: ", ", + expected: "hello", + }, + { + name: "two strings with comma separator", + strs: []string{"markdown", "docx"}, + separator: ", ", + expected: "markdown, docx", + }, + { + name: "three strings with comma separator", + strs: []string{"markdown", "md", "docx"}, + separator: ", ", + expected: "markdown, md, docx", + }, + { + name: "multiple strings with pipe separator", + strs: []string{"option1", "option2", "option3"}, + separator: " | ", + expected: "option1 | option2 | option3", + }, + { + name: "strings with no separator", + strs: []string{"a", "b", "c"}, + separator: "", + expected: "abc", + }, + { + name: "strings with newline separator", + strs: []string{"line1", "line2", "line3"}, + separator: "\n", + expected: "line1\nline2\nline3", + }, + { + name: "empty strings in slice", + strs: []string{"", "middle", ""}, + separator: "-", + expected: "-middle-", + }, + { + name: "nil slice", + strs: nil, + separator: ", ", + expected: "", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := joinStrings(tt.strs, tt.separator) + if result != tt.expected { + t.Errorf("joinStrings(%v, %q) = %q, want %q", tt.strs, tt.separator, result, tt.expected) + } + }) + } +} + +// BenchmarkIsURI benchmarks the isURI function performance. +func BenchmarkIsURI(b *testing.B) { + testStr := "https://rise.articulate.com/share/N_APNg40Vr2CSH2xNz-ZLATM5kNviDIO#/" + + b.ResetTimer() + for i := 0; i < b.N; i++ { + isURI(testStr) + } +} + +// BenchmarkJoinStrings benchmarks the joinStrings function performance. +func BenchmarkJoinStrings(b *testing.B) { + strs := []string{"markdown", "md", "docx", "word", "pdf", "html"} + separator := ", " + + b.ResetTimer() + for i := 0; i < b.N; i++ { + joinStrings(strs, separator) + } +}
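
Note: main_test.go exercises isURI and joinStrings without including their definitions in this patch. A minimal sketch consistent with the table-driven expectations above (a bare scheme-prefix check, and strings.Join-style concatenation) could look like the following; the actual helpers in main.go may be implemented differently:

package main

import "strings"

// isURI reports whether the argument looks like an HTTP(S) URL rather than a
// local file path. Sketch only: it mirrors the prefix-based behaviour the
// tests imply, so "http", "ftp://...", and "xhttp://..." all return false.
func isURI(s string) bool {
	return strings.HasPrefix(s, "http://") || strings.HasPrefix(s, "https://")
}

// joinStrings concatenates strs with sep between elements, equivalent to
// strings.Join; a nil or empty slice yields "".
func joinStrings(strs []string, sep string) string {
	out := ""
	for i, s := range strs {
		if i > 0 {
			out += sep
		}
		out += s
	}
	return out
}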