diff --git a/.env.example b/.env.example index 5acd897..cbe3191 100644 --- a/.env.example +++ b/.env.example @@ -1,10 +1,38 @@ # Lidify Configuration # Copy to .env and edit as needed +# ============================================================================== +# Database Configuration +# ============================================================================== +DATABASE_URL="postgresql://lidify:lidify@localhost:5433/lidify" + +# ============================================================================== +# Redis Configuration +# ============================================================================== +# Note: Redis container port is mapped to 6380 to avoid conflicts with other Redis instances +REDIS_URL="redis://localhost:6380" + # ============================================================================== # REQUIRED: Path to your music library # ============================================================================== MUSIC_PATH=/path/to/your/music +# DEVELOPMENT: Use your local path (e.g., /home/user/Music) +# DOCKER: This is the HOST path that gets mounted to /music in the container +# The backend inside Docker always uses /music, not this value. 
+# Example: MUSIC_PATH=~/Music (container mounts as ~/Music:/music) + +# ============================================================================== +# REQUIRED: Security Keys +# ============================================================================== + +# Encryption key for sensitive data (API keys, passwords, 2FA secrets) +# CRITICAL: You MUST set this before starting Lidify +# Generate with: openssl rand -base64 32 +SETTINGS_ENCRYPTION_KEY= + +# Session secret (auto-generated if not set) +# Generate with: openssl rand -base64 32 +SESSION_SECRET= # ============================================================================== # OPTIONAL: Customize these if needed @@ -16,9 +44,14 @@ PORT=3030 # Timezone (default: UTC) TZ=UTC -# Session secret (auto-generated if not set) -# Generate with: openssl rand -base64 32 -SESSION_SECRET= +# Logging level (default: debug in development, warn in production) +# Options: debug, info, warn, error, silent +LOG_LEVEL=debug + +# Allow public access to API documentation in production (default: false) +# Set to 'true' to make /api/docs accessible without authentication in production +# Development mode always allows public access +# DOCS_PUBLIC=true # DockerHub username (for pulling images) # Your DockerHub username (same as GitHub: chevron7locked) @@ -26,3 +59,13 @@ DOCKERHUB_USERNAME=chevron7locked # Version tag (use 'latest' or specific like 'v1.0.0') VERSION=latest + +# ============================================================================== +# OPTIONAL: Audio Analyzer CPU Control +# ============================================================================== + +# Audio Analyzer CPU Control +# AUDIO_ANALYSIS_WORKERS=2 # Number of parallel worker processes (1-8) +# AUDIO_ANALYSIS_THREADS_PER_WORKER=1 # Threads per worker for TensorFlow/FFT (1-4, default 1) +# Formula: max_cpu_usage ≈ WORKERS × (THREADS_PER_WORKER + 1) × 100% +# Example: 2 workers × (1 thread + 1 overhead) = ~400% CPU (4 cores) diff --git 
a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml new file mode 100644 index 0000000..f4b17ad --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.yml @@ -0,0 +1,102 @@ +name: Bug Report +description: Report a bug or unexpected behavior +title: "[Bug]: " +labels: ["bug", "needs triage"] +body: + - type: markdown + attributes: + value: | + Thanks for taking the time to report a bug. Please fill out the information below to help us diagnose and fix the issue. + + - type: textarea + id: description + attributes: + label: Bug Description + description: A clear and concise description of what the bug is. + placeholder: Describe the bug... + validations: + required: true + + - type: textarea + id: reproduction + attributes: + label: Steps to Reproduce + description: Step-by-step instructions to reproduce the behavior. + placeholder: | + 1. Go to '...' + 2. Click on '...' + 3. Scroll down to '...' + 4. See error + validations: + required: true + + - type: textarea + id: expected + attributes: + label: Expected Behavior + description: What did you expect to happen? + placeholder: Describe what should have happened... + validations: + required: true + + - type: textarea + id: actual + attributes: + label: Actual Behavior + description: What actually happened? + placeholder: Describe what actually happened... + validations: + required: true + + - type: input + id: version + attributes: + label: Lidify Version + description: What version of Lidify are you running? + placeholder: "e.g., v1.0.0, nightly-2024-01-15, or commit hash" + validations: + required: true + + - type: dropdown + id: deployment + attributes: + label: Deployment Method + description: How are you running Lidify? 
+ options: + - Docker (docker-compose) + - Docker (standalone) + - Manual/Source + - Other + validations: + required: true + + - type: textarea + id: environment + attributes: + label: Environment Details + description: Any relevant environment information (OS, browser, Docker version, etc.) + placeholder: | + - OS: Ubuntu 22.04 + - Docker: 24.0.5 + - Browser: Firefox 120 + validations: + required: false + + - type: textarea + id: logs + attributes: + label: Relevant Logs + description: Please copy and paste any relevant log output. This will be automatically formatted into code. + render: shell + validations: + required: false + + - type: checkboxes + id: checklist + attributes: + label: Checklist + options: + - label: I have searched existing issues to ensure this bug hasn't already been reported + required: true + - label: I am using a supported version of Lidify + required: true diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml new file mode 100644 index 0000000..e3e3c9d --- /dev/null +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -0,0 +1,5 @@ +blank_issues_enabled: false +contact_links: + - name: Questions & Discussions + url: https://github.com/Chevron7Locked/lidify/discussions + about: Ask questions and discuss Lidify in GitHub Discussions diff --git a/.github/ISSUE_TEMPLATE/feature_request.yml b/.github/ISSUE_TEMPLATE/feature_request.yml new file mode 100644 index 0000000..d7a2f4e --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature_request.yml @@ -0,0 +1,64 @@ +name: Feature Request +description: Suggest a new feature or enhancement +title: "[Feature]: " +labels: ["enhancement", "needs triage"] +body: + - type: markdown + attributes: + value: | + Thanks for suggesting a feature! Please provide as much detail as possible. + + - type: textarea + id: problem + attributes: + label: Problem or Use Case + description: What problem does this feature solve? What are you trying to accomplish? + placeholder: "I'm trying to... 
but currently..." + validations: + required: true + + - type: textarea + id: solution + attributes: + label: Proposed Solution + description: Describe the feature you'd like to see implemented. + placeholder: Describe your ideal solution... + validations: + required: true + + - type: textarea + id: alternatives + attributes: + label: Alternatives Considered + description: Have you considered any alternative solutions or workarounds? + placeholder: Describe alternatives you've considered... + validations: + required: false + + - type: dropdown + id: scope + attributes: + label: Feature Scope + description: How big of a change is this? + options: + - Small (UI tweak, minor enhancement) + - Medium (new component, significant enhancement) + - Large (new major feature, architectural change) + validations: + required: true + + - type: checkboxes + id: contribution + attributes: + label: Contribution + options: + - label: I would be willing to help implement this feature + required: false + + - type: checkboxes + id: checklist + attributes: + label: Checklist + options: + - label: I have searched existing issues to ensure this hasn't already been requested + required: true diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md new file mode 100644 index 0000000..4ae9d82 --- /dev/null +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -0,0 +1,37 @@ +## Description + + + +## Type of Change + +- [ ] Bug fix (non-breaking change that fixes an issue) +- [ ] New feature (non-breaking change that adds functionality) +- [ ] Enhancement (improvement to existing functionality) +- [ ] Documentation update +- [ ] Code cleanup / refactoring +- [ ] Other (please describe): + +## Related Issues + +Fixes # + +## Changes Made + +- +- +- + +## Testing Done + +- [ ] Tested locally with Docker +- [ ] Tested specific functionality: + +## Screenshots (if applicable) + +## Checklist + +- [ ] My code follows the project's code style +- [ ] I have tested my changes locally +- [ ] 
I have updated documentation if needed +- [ ] My changes don't introduce new warnings +- [ ] This PR targets the `main` branch diff --git a/.github/workflows/docker-nightly.yml b/.github/workflows/docker-nightly.yml new file mode 100644 index 0000000..4f750c6 --- /dev/null +++ b/.github/workflows/docker-nightly.yml @@ -0,0 +1,55 @@ +name: Nightly Build + +on: + push: + branches: [main] + tags-ignore: + - "v*" # Don't trigger on version tags - docker-publish handles those + +env: + IMAGE_NAME: ${{ secrets.DOCKERHUB_USERNAME }}/lidify + +jobs: + build-nightly: + name: Build & Push Nightly Image + runs-on: ubuntu-latest + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Free up disk space + run: | + sudo rm -rf /usr/share/dotnet + sudo rm -rf /opt/ghc + sudo rm -rf /usr/local/share/boost + sudo rm -rf "$AGENT_TOOLSDIRECTORY" + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Login to Docker Hub + uses: docker/login-action@v3 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + + - name: Get short SHA + id: sha + run: echo "short=$(git rev-parse --short HEAD)" >> $GITHUB_OUTPUT + + - name: Build and push nightly + uses: docker/build-push-action@v5 + with: + context: . 
+ file: ./Dockerfile + push: true + tags: | + ${{ env.IMAGE_NAME }}:nightly + ${{ env.IMAGE_NAME }}:nightly-${{ steps.sha.outputs.short }} + labels: | + org.opencontainers.image.revision=${{ github.sha }} + org.opencontainers.image.version=nightly-${{ steps.sha.outputs.short }} + cache-from: type=gha + cache-to: type=gha,mode=max + # ARM64 disabled due to QEMU emulation issues with npm packages + platforms: linux/amd64 diff --git a/.github/workflows/pr-checks.yml b/.github/workflows/pr-checks.yml new file mode 100644 index 0000000..bedf27a --- /dev/null +++ b/.github/workflows/pr-checks.yml @@ -0,0 +1,48 @@ +name: PR Checks + +on: + pull_request: + branches: [main] + types: [opened, synchronize, reopened] + +jobs: + lint-frontend: + name: Lint Frontend + runs-on: ubuntu-latest + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Setup Node.js + uses: actions/setup-node@v4 + with: + node-version: "20" + cache: "npm" + cache-dependency-path: frontend/package-lock.json + + - name: Install frontend dependencies + working-directory: frontend + run: npm ci + + - name: Run ESLint on frontend + working-directory: frontend + run: npm run lint + + build-docker: + name: Docker Build Check + runs-on: ubuntu-latest + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Build Docker image (no push) + uses: docker/build-push-action@v5 + with: + context: . 
+ push: false + tags: lidify:pr-check + cache-from: type=gha + cache-to: type=gha,mode=max diff --git a/.gitignore b/.gitignore index f4dab33..5e0ffec 100644 --- a/.gitignore +++ b/.gitignore @@ -13,6 +13,7 @@ .env.test.local .env.production.local .env.local +.roomodes # ============================================================================= # Dependencies @@ -35,7 +36,7 @@ ENV/ **/.venv/ # ============================================================================= -# Build Outputs +# Build # ============================================================================= # Frontend (Next.js) frontend/.next/ @@ -316,6 +317,17 @@ bower_components reset-and-setup.sh organize-singles.sh +# AI Context Management (keep locally, don't push to GitHub) +context_portal/ + +# Internal Development Documentation (keep locally, don't push to GitHub) +docs/ +**/docs/ + + +# Temporary commit messages +COMMIT_MESSAGE.txt + # Backend development logs backend/logs/ @@ -349,6 +361,8 @@ soularr/ **/.cursor/ .vscode/ **/.vscode/ +.roo/ +**/.roo/ # ============================================================================= # Android Build Artifacts (contains local paths) @@ -381,3 +395,7 @@ backend/mullvad/ # Android signing lidify.keystore keystore.b64 +.aider* + +issues/ +plans/ \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 0000000..420fc98 --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,203 @@ +# Changelog + +All notable changes to Lidify will be documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), +and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 
+ +## [1.3.0] - 2026-01-06 + +### Added + +- Multi-source download system with configurable Soulseek/Lidarr primary source and fallback options +- Configurable enrichment speed control (1-5x concurrency) in Settings → Cache & Automation +- Stale job cleanup button in Settings to clear stuck Discovery batches and downloads +- Mobile touch drag support for seek sliders on all player views +- Skip ±30s buttons for audiobooks/podcasts on mobile players +- iOS PWA media controls support (Control Center and Lock Screen) +- Artist name alias resolution via Last.fm (e.g., "of mice" → "Of Mice & Men") +- Library grid now supports 8 columns on ultra-wide displays (2xl breakpoint) +- Artist discography sorting options (Year/Date Added) +- Enrichment failure notifications with retry/skip modal +- Download history deduplication to prevent duplicate entries +- Utility function for normalizing API responses to arrays (`normalizeToArray`) - @tombatossals +- Keyword-based mood scoring for standard analysis mode tracks - @RustyJonez +- Global and route-level error boundaries for better error handling +- React Strict Mode for development quality checks +- Next.js image optimization enabled by default +- Mobile-aware animation rendering (GalaxyBackground disables particles on mobile) +- Accessibility motion preferences support (`prefers-reduced-motion`) +- Lazy loading for heavy components (MoodMixer, VibeOverlay, MetadataEditor) +- Bundle analyzer tooling (`npm run analyze`) +- Loading states for all 10 priority routes +- Skip links for keyboard navigation (WCAG 2.1 AA compliance) +- ARIA attributes on all interactive controls and navigation elements +- Toast notifications with ARIA live regions for screen readers +- Bull Board admin dashboard authentication (requires admin user) +- Lidarr webhook signature verification with configurable secret +- Encryption key validation on startup (prevents insecure defaults) +- Session cookie security (httpOnly, sameSite=strict, secure in 
production) +- Swagger API documentation authentication in production +- JWT token expiration (24h access tokens, 30d refresh tokens) +- JWT refresh token endpoint (`/api/auth/refresh`) +- Token version validation (password changes invalidate existing tokens) +- Download queue reconciliation on server startup (marks stale jobs as failed) +- Redis batch operations for cache warmup (MULTI/EXEC pipelining) +- Memory-efficient database-level shuffle (`ORDER BY RANDOM() LIMIT n`) +- Dynamic import caching in queue cleaner (lazy-load pattern) +- Database index for `DownloadJob.targetMbid` field +- PWA install prompt dismissal persistence (7-day cooldown) + +### Fixed + +- **Critical:** Audio analyzer crashes on libraries with non-ASCII filenames ([#6](https://github.com/Chevron7Locked/lidify/issues/6)) +- **Critical:** Audio analyzer BrokenProcessPool after ~1900 tracks ([#21](https://github.com/Chevron7Locked/lidify/issues/21)) +- **Critical:** Audio analyzer OOM kills with aggressive worker auto-scaling ([#26](https://github.com/Chevron7Locked/lidify/issues/26)) +- **Critical:** Audio analyzer model downloads and volume mount conflicts ([#2](https://github.com/Chevron7Locked/lidify/issues/2)) +- Radio stations playing songs from wrong decades due to remaster dates ([#43](https://github.com/Chevron7Locked/lidify/issues/43)) +- Manual metadata editing failing with 500 errors ([#9](https://github.com/Chevron7Locked/lidify/issues/9)) +- Active downloads not resolving after Lidarr successfully imports ([#31](https://github.com/Chevron7Locked/lidify/issues/31)) +- Discovery playlist downloads failing for artists with large catalogs ([#34](https://github.com/Chevron7Locked/lidify/issues/34)) +- Discovery batches stuck in "downloading" status indefinitely +- Audio analyzer rhythm extraction failures on short/silent audio ([#13](https://github.com/Chevron7Locked/lidify/issues/13)) +- "Of Mice & Men" artist name truncated to "Of Mice" during scanning +- Edition variant albums 
(Remastered, Deluxe) failing with "No releases available" +- Downloads stuck in "Lidarr #1" state for 5 minutes before failing +- Download duplicate prevention race condition causing 10+ duplicate jobs +- Lidarr downloads incorrectly cancelled during temporary network issues +- Discovery Weekly track durations showing "NaN:NaN" +- Artist name search ampersand handling ("Earth, Wind & Fire") +- Vibe overlay display issues on mobile devices +- Pagination scroll behavior (now scrolls to top instead of bottom) +- LastFM API crashes when receiving single objects instead of arrays ([#37](https://github.com/Chevron7Locked/lidify/issues/37)) - @tombatossals +- Mood bucket infinite loop for tracks analyzed in standard mode ([#40](https://github.com/Chevron7Locked/lidify/issues/40)) - @RustyJonez +- Playlist visibility toggle not properly syncing hide/show state - @tombatossals +- Audio player time display showing current time exceeding total duration (e.g., "58:00 / 54:34") +- Progress bar could exceed 100% for long-form media with stale metadata +- Enrichment P2025 errors when retrying enrichment for deleted entities +- Download settings fallback not resetting when changing primary source +- SeekSlider touch events bubbling to parent OverlayPlayer swipe handlers +- Audiobook/podcast position showing 0:00 after page refresh instead of saved progress +- Volume slider showing no visual fill indicator for current level +- PWA install prompt reappearing after user dismissal + +### Changed + +- Audio analyzer default workers reduced from auto-scale to 2 (memory conservative) +- Audio analyzer Docker memory limits: 6GB limit, 2GB reservation +- Download status polling intervals: 5s (active) / 10s (idle) / 30s (none), previously 15s +- Library pagination options changed to 24/40/80/200 (divisible by 8-column grid) +- Lidarr download failure detection now has 90-second grace period (3 checks) +- Lidarr catalog population timeout increased from 45s to 60s +- Download notifications 
now use API-driven state instead of local pending state +- Enrichment stop button now gracefully finishes current item before stopping +- Per-album enrichment triggers immediately instead of waiting for batch completion +- Lidarr edition variant detection now proactive (enables `anyReleaseOk` before first search) +- Discovery system now uses AcquisitionService for unified album/track acquisition +- Podcast and audiobook time display now shows time remaining instead of total duration +- Edition variant albums automatically fall back to base title search when edition-specific search fails +- Stale pending downloads cleaned up after 2 minutes (was indefinite) +- Download source detection now prioritizes actual service availability over user preference + +### Removed + +- Artist delete buttons hidden on mobile to prevent accidental deletion +- Audio analyzer models volume mount (shadowed built-in models) + +### Database Migrations Required + +```bash +# Run Prisma migrations +cd backend +npx prisma migrate deploy +``` + +**New Schema Fields:** + +- `Album.originalYear` - Stores original release year (separate from remaster dates) +- `SystemSettings.enrichmentConcurrency` - User-configurable enrichment speed (1-5) +- `SystemSettings.downloadSource` - Primary download source selection +- `SystemSettings.primaryFailureFallback` - Fallback behavior on primary source failure +- `SystemSettings.lidarrWebhookSecret` - Shared secret for Lidarr webhook signature verification +- `User.tokenVersion` - Version number for JWT token invalidation on password change +- `DownloadJob.targetMbid` - Index added for improved query performance + +**Backfill Script (Optional):** + +```bash +# Backfill originalYear for existing albums +cd backend +npx ts-node scripts/backfill-original-year.ts +``` + +### Breaking Changes + +- None - All changes are backward compatible + +### Security + +- **Critical:** Bull Board admin dashboard now requires authenticated admin user +- **Critical:** Lidarr 
webhooks verify signature/secret before processing requests +- **Critical:** Encryption key validation on startup prevents insecure defaults +- Session cookies use secure settings in production (httpOnly, sameSite=strict, secure) +- Swagger API documentation requires authentication in production (unless `DOCS_PUBLIC=true`) +- JWT tokens have proper expiration (24h access, 30d refresh) with refresh token support +- Password changes invalidate all existing tokens via tokenVersion increment +- Transaction-based download job creation prevents race conditions +- Enrichment stop control no longer bypassed by worker state +- Download queue webhook handlers use Serializable isolation transactions +- Webhook race conditions protected with exponential backoff retry logic + +--- + +## Release Notes + +When deploying this update: + +1. **Backup your database** before running migrations +2. **Set required environment variable** (if not already set): + ```bash + # Generate secure encryption key + SETTINGS_ENCRYPTION_KEY=$(openssl rand -base64 32) + ``` +3. Run `npx prisma migrate deploy` in the backend directory +4. Optionally run the originalYear backfill script for era mix accuracy: + ```bash + cd backend + npx ts-node scripts/backfill-original-year.ts + ``` +5. Clear Docker volumes for audio-analyzer if experiencing model issues: + ```bash + docker volume rm lidify_audio_analyzer_models 2>/dev/null || true + docker compose build audio-analyzer --no-cache + ``` +6. Review Settings → Downloads for new multi-source download options +7. Review Settings → Cache for new enrichment speed control +8. Configure Lidarr webhook secret in Settings for webhook signature verification (recommended) +9. 
Review Settings → Security for JWT token settings + +### Known Issues + +- Pre-existing TypeScript errors in spotifyImport.ts matchTrack method (unrelated to this release) +- Simon & Garfunkel artist name may be truncated due to short second part (edge case, not blocking) + +### Contributors + +Big thanks to everyone who contributed, tested, and helped make this release happen: + +- @tombatossals - LastFM API normalization utility ([#39](https://github.com/Chevron7Locked/lidify/pull/39)), playlist visibility toggle fix ([#49](https://github.com/Chevron7Locked/lidify/pull/49)) +- @RustyJonez - Mood bucket standard mode keyword scoring ([#47](https://github.com/Chevron7Locked/lidify/pull/47)) +- @iamiq - Audio analyzer crash reporting ([#2](https://github.com/Chevron7Locked/lidify/issues/2)) +- @volcs0 - Memory pressure testing ([#26](https://github.com/Chevron7Locked/lidify/issues/26)) +- @Osiriz - Long-running analysis testing ([#21](https://github.com/Chevron7Locked/lidify/issues/21)) +- @hessonam - Non-ASCII character testing ([#6](https://github.com/Chevron7Locked/lidify/issues/6)) +- @niles - RhythmExtractor edge case reporting ([#13](https://github.com/Chevron7Locked/lidify/issues/13)) +- @TheChrisK - Metadata editor bug reporting ([#9](https://github.com/Chevron7Locked/lidify/issues/9)) +- @lizar93 - Discovery playlist testing ([#34](https://github.com/Chevron7Locked/lidify/issues/34)) +- @brokenglasszero - Mood tags feature verification ([#35](https://github.com/Chevron7Locked/lidify/issues/35)) + +And all users who reported bugs, tested fixes, and provided feedback! + +--- + +For detailed technical implementation notes, see [docs/PENDING_DEPLOY.md](docs/PENDING_DEPLOY.md). diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 0000000..b353b9a --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,63 @@ +# Contributing to Lidify + +First off, thanks for taking the time to contribute! 🎉 + +## Getting Started + +1. Fork the repository +2. 
Clone your fork locally +3. Set up the development environment (see README.md) +4. Create a new branch from `main` for your changes + +## Branch Strategy + +All development happens on the `main` branch: + +- **All PRs should target `main`** +- Every push to `main` triggers a nightly Docker build +- Stable releases are created via version tags + +## Making Contributions + +### Bug Fixes + +1. Check existing issues to see if the bug has been reported +2. If not, open a bug report issue first +3. Fork, branch, fix, and submit a PR referencing the issue + +### Small Enhancements + +1. Open a feature request issue to discuss first +2. Keep changes focused and minimal + +### Large Features + +Please open an issue to discuss before starting work. + +## Code Style + +### Frontend + +The frontend uses ESLint. Before submitting a PR: + +```bash +cd frontend +npm run lint +``` + +### Backend + +Follow existing code patterns and TypeScript conventions. + +## Pull Request Process + +1. **Target the `main` branch** +2. Fill out the PR template completely +3. Ensure the Docker build check passes +4. Wait for review - we'll provide feedback or approve + +## Questions? + +Open a Discussion thread for questions that aren't bugs or feature requests. + +Thanks for contributing! diff --git a/Dockerfile b/Dockerfile index b38c8fb..b0e9b9e 100644 --- a/Dockerfile +++ b/Dockerfile @@ -48,35 +48,73 @@ RUN pip3 install --no-cache-dir --break-system-packages \ psycopg2-binary # Download Essentia ML models (~200MB total) - these enable Enhanced vibe matching +# IMPORTANT: Using MusiCNN models to match analyzer.py expectations RUN echo "Downloading Essentia ML models for Enhanced vibe matching..." 
&& \ - # Base embedding model (required for all predictions) - curl -L --progress-bar -o /app/models/discogs-effnet-bs64-1.pb \ - "https://essentia.upf.edu/models/feature-extractors/discogs-effnet/discogs-effnet-bs64-1.pb" && \ - # Mood models - curl -L --progress-bar -o /app/models/mood_happy-discogs-effnet-1.pb \ - "https://essentia.upf.edu/models/classification-heads/mood_happy/mood_happy-discogs-effnet-1.pb" && \ - curl -L --progress-bar -o /app/models/mood_sad-discogs-effnet-1.pb \ - "https://essentia.upf.edu/models/classification-heads/mood_sad/mood_sad-discogs-effnet-1.pb" && \ - curl -L --progress-bar -o /app/models/mood_relaxed-discogs-effnet-1.pb \ - "https://essentia.upf.edu/models/classification-heads/mood_relaxed/mood_relaxed-discogs-effnet-1.pb" && \ - curl -L --progress-bar -o /app/models/mood_aggressive-discogs-effnet-1.pb \ - "https://essentia.upf.edu/models/classification-heads/mood_aggressive/mood_aggressive-discogs-effnet-1.pb" && \ - # Arousal and Valence (key for vibe matching) - curl -L --progress-bar -o /app/models/mood_arousal-discogs-effnet-1.pb \ - "https://essentia.upf.edu/models/classification-heads/mood_arousal/mood_arousal-discogs-effnet-1.pb" && \ - curl -L --progress-bar -o /app/models/mood_valence-discogs-effnet-1.pb \ - "https://essentia.upf.edu/models/classification-heads/mood_valence/mood_valence-discogs-effnet-1.pb" && \ - # Danceability and Voice/Instrumental - curl -L --progress-bar -o /app/models/danceability-discogs-effnet-1.pb \ - "https://essentia.upf.edu/models/classification-heads/danceability/danceability-discogs-effnet-1.pb" && \ - curl -L --progress-bar -o /app/models/voice_instrumental-discogs-effnet-1.pb \ - "https://essentia.upf.edu/models/classification-heads/voice_instrumental/voice_instrumental-discogs-effnet-1.pb" && \ + # Base MusiCNN embedding model (required for all predictions) + curl -L --progress-bar -o /app/models/msd-musicnn-1.pb \ + "https://essentia.upf.edu/models/autotagging/msd/msd-musicnn-1.pb" && 
\ + # Mood classification heads (using MusiCNN architecture) + curl -L --progress-bar -o /app/models/mood_happy-msd-musicnn-1.pb \ + "https://essentia.upf.edu/models/classification-heads/mood_happy/mood_happy-msd-musicnn-1.pb" && \ + curl -L --progress-bar -o /app/models/mood_sad-msd-musicnn-1.pb \ + "https://essentia.upf.edu/models/classification-heads/mood_sad/mood_sad-msd-musicnn-1.pb" && \ + curl -L --progress-bar -o /app/models/mood_relaxed-msd-musicnn-1.pb \ + "https://essentia.upf.edu/models/classification-heads/mood_relaxed/mood_relaxed-msd-musicnn-1.pb" && \ + curl -L --progress-bar -o /app/models/mood_aggressive-msd-musicnn-1.pb \ + "https://essentia.upf.edu/models/classification-heads/mood_aggressive/mood_aggressive-msd-musicnn-1.pb" && \ + curl -L --progress-bar -o /app/models/mood_party-msd-musicnn-1.pb \ + "https://essentia.upf.edu/models/classification-heads/mood_party/mood_party-msd-musicnn-1.pb" && \ + curl -L --progress-bar -o /app/models/mood_acoustic-msd-musicnn-1.pb \ + "https://essentia.upf.edu/models/classification-heads/mood_acoustic/mood_acoustic-msd-musicnn-1.pb" && \ + curl -L --progress-bar -o /app/models/mood_electronic-msd-musicnn-1.pb \ + "https://essentia.upf.edu/models/classification-heads/mood_electronic/mood_electronic-msd-musicnn-1.pb" && \ + # Other classification heads + curl -L --progress-bar -o /app/models/danceability-msd-musicnn-1.pb \ + "https://essentia.upf.edu/models/classification-heads/danceability/danceability-msd-musicnn-1.pb" && \ + curl -L --progress-bar -o /app/models/voice_instrumental-msd-musicnn-1.pb \ + "https://essentia.upf.edu/models/classification-heads/voice_instrumental/voice_instrumental-msd-musicnn-1.pb" && \ echo "ML models downloaded successfully" && \ ls -lh /app/models/ # Copy audio analyzer script COPY services/audio-analyzer/analyzer.py /app/audio-analyzer/ +# Create database readiness check script +RUN cat > /app/wait-for-db.sh << 'EOF' +#!/bin/bash +TIMEOUT=${1:-120} +COUNTER=0 + +echo 
"[wait-for-db] Waiting for database schema (timeout: ${TIMEOUT}s)..." + +# Quick check for schema ready flag +if [ -f /data/.schema_ready ]; then + echo "[wait-for-db] Schema ready flag found, verifying connection..." +fi + +while [ $COUNTER -lt $TIMEOUT ]; do + if PGPASSWORD=lidify psql -h localhost -U lidify -d lidify -c "SELECT 1 FROM \"Track\" LIMIT 1" > /dev/null 2>&1; then + echo "[wait-for-db] ✓ Database is ready and schema exists!" + exit 0 + fi + + if [ $((COUNTER % 15)) -eq 0 ]; then + echo "[wait-for-db] Still waiting... (${COUNTER}s elapsed)" + fi + + sleep 1 + COUNTER=$((COUNTER + 1)) +done + +echo "[wait-for-db] ERROR: Database schema not ready after ${TIMEOUT}s" +echo "[wait-for-db] Listing available tables:" +PGPASSWORD=lidify psql -h localhost -U lidify -d lidify -c "\dt" 2>&1 || echo "Could not list tables" +exit 1 +EOF + +RUN chmod +x /app/wait-for-db.sh && \ + sed -i 's/\r$//' /app/wait-for-db.sh + # ============================================ # BACKEND BUILD # ============================================ @@ -164,9 +202,11 @@ stderr_logfile_maxbytes=0 priority=20 [program:backend] -command=/bin/bash -c "sleep 5 && cd /app/backend && npx tsx src/index.ts" +command=/bin/bash -c "/app/wait-for-db.sh 120 && cd /app/backend && npx tsx src/index.ts" autostart=true -autorestart=true +autorestart=unexpected +startretries=3 +startsecs=10 stdout_logfile=/dev/stdout stdout_logfile_maxbytes=0 stderr_logfile=/dev/stderr @@ -186,9 +226,11 @@ environment=NODE_ENV="production",BACKEND_URL="http://localhost:3006",PORT="3030 priority=40 [program:audio-analyzer] -command=/bin/bash -c "sleep 15 && cd /app/audio-analyzer && python3 analyzer.py" +command=/bin/bash -c "/app/wait-for-db.sh 120 && cd /app/audio-analyzer && python3 analyzer.py" autostart=true -autorestart=true +autorestart=unexpected +startretries=3 +startsecs=10 stdout_logfile=/dev/stdout stdout_logfile_maxbytes=0 stderr_logfile=/dev/stderr @@ -271,32 +313,53 @@ MIGRATIONS_EXIST=$(gosu postgres psql -d 
lidify -tAc "SELECT EXISTS (SELECT FROM # Check if User table exists (indicates existing data) USER_TABLE_EXIST=$(gosu postgres psql -d lidify -tAc "SELECT EXISTS (SELECT FROM information_schema.tables WHERE table_schema = 'public' AND table_name = 'User')" 2>/dev/null || echo "f") +# Handle rename migration for existing databases +echo "Checking if rename migration needs to be marked as applied..." +if gosu postgres psql -d lidify -tAc "SELECT EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name='SystemSettings' AND column_name='soulseekFallback');" 2>/dev/null | grep -q 't'; then + echo "Old column exists, marking migration as applied..." + gosu postgres psql -d lidify -c "INSERT INTO \"_prisma_migrations\" (id, checksum, finished_at, migration_name, logs, rolled_back_at, started_at, applied_steps_count) VALUES (gen_random_uuid(), '', NOW(), '20250101000000_rename_soulseek_fallback', '', NULL, NOW(), 1) ON CONFLICT DO NOTHING;" 2>/dev/null || true +fi + if [ "$MIGRATIONS_EXIST" = "t" ]; then # Normal migration flow - migrations table exists echo "Migration history found, running migrate deploy..." - npx prisma migrate deploy 2>&1 || { - echo "WARNING: Migration failed, but database preserved." - echo "You may need to manually resolve migration issues." - } + if ! npx prisma migrate deploy 2>&1; then + echo "FATAL: Database migration failed! Check logs above." + exit 1 + fi elif [ "$USER_TABLE_EXIST" = "t" ]; then # Database has data but no migrations table - needs baseline echo "Existing database detected without migration history." echo "Creating baseline from current schema..." # Mark the init migration as already applied (baseline) - npx prisma migrate resolve --applied 20251130000000_init 2>&1 || true # Now run any subsequent migrations - npx prisma migrate deploy 2>&1 || { - echo "WARNING: Migration after baseline failed." 
- echo "Database preserved - check migration status manually." - } + if ! npx prisma migrate deploy 2>&1; then + echo "FATAL: Migration after baseline failed!" + exit 1 + fi else # Fresh database - run migrations normally echo "Fresh database detected, running initial migrations..." - npx prisma migrate deploy 2>&1 || { - echo "WARNING: Initial migration failed." - echo "Check database connection and schema." - } + if ! npx prisma migrate deploy 2>&1; then + echo "FATAL: Initial migration failed. Check database connection and schema." + exit 1 + fi fi +echo "✓ Migrations completed successfully" + +# Verify schema exists before starting services +echo "Verifying database schema..." +if ! gosu postgres psql -d lidify -c "SELECT 1 FROM \"Track\" LIMIT 1" >/dev/null 2>&1; then + echo "FATAL: Track table does not exist after migration!" + echo "Database schema verification failed. Container will exit." + exit 1 +fi +echo "✓ Schema verification passed" + +# Create flag file for wait-for-db.sh +touch /data/.schema_ready +echo "✓ Schema ready flag created" # Stop PostgreSQL (supervisord will start it) gosu postgres $PG_BIN/pg_ctl -D /data/postgres -w stop @@ -338,7 +401,12 @@ SETTINGS_ENCRYPTION_KEY=$SETTINGS_ENCRYPTION_KEY ENVEOF echo "Starting Lidify..." -exec /usr/bin/supervisord -c /etc/supervisor/supervisord.conf +exec env \ + NODE_ENV=production \ + DATABASE_URL="postgresql://lidify:lidify@localhost:5432/lidify" \ + SESSION_SECRET="$SESSION_SECRET" \ + SETTINGS_ENCRYPTION_KEY="$SETTINGS_ENCRYPTION_KEY" \ + /usr/bin/supervisord -c /etc/supervisor/supervisord.conf EOF # Fix Windows line endings (CRLF -> LF) and make executable diff --git a/README.md b/README.md index 51f3b8c..44db151 100644 --- a/README.md +++ b/README.md @@ -276,6 +276,34 @@ Lidify will begin scanning your music library automatically. 
Depending on the si --- +## Release Channels + +Lidify offers two release channels to match your stability preferences: + +### 🟢 Stable (Recommended) + +Production-ready releases. Updated when new stable versions are released. + +```bash +docker pull chevron7locked/lidify:latest +# or specific version +docker pull chevron7locked/lidify:v1.2.0 +``` + +### 🔴 Nightly (Development) + +Latest development build. Built on every push to main. + +⚠️ **Not recommended for production** - may be unstable or broken. + +```bash +docker pull chevron7locked/lidify:nightly +``` + +**For contributors:** See [`CONTRIBUTING.md`](CONTRIBUTING.md) for information on submitting pull requests and contributing to Lidify. + +--- + ## Configuration ### Environment Variables @@ -287,8 +315,7 @@ The unified Lidify container handles most configuration automatically. Here are | `SESSION_SECRET` | Auto-generated | Session encryption key (recommended to set for persistence across restarts) | | `TZ` | `UTC` | Timezone for the container | | `LIDIFY_CALLBACK_URL` | `http://host.docker.internal:3030` | URL for Lidarr webhook callbacks (see [Lidarr integration](#lidarr)) | -| `NUM_WORKERS` | Auto-generated (50% of CPU Cores) | Limit the amount of workers being used in Auto Analysis. | - +| `NUM_WORKERS` | `2` | Number of parallel workers for audio analysis | The music library path is configured via Docker volume mount (`-v /path/to/music:/music`). @@ -314,42 +341,40 @@ ALLOWED_ORIGINS=http://localhost:3030,https://lidify.yourdomain.com Lidify uses several sensitive environment variables. Never commit your `.env` file. 
-| Variable | Purpose | Required | -| ------------------------- | ------------------------------ | ------------------ | -| `SESSION_SECRET` | Session encryption (32+ chars) | Yes | -| `SETTINGS_ENCRYPTION_KEY` | Encrypts stored credentials | Recommended | -| `SOULSEEK_USERNAME` | Soulseek login | If u sing Soulseek | -| `SOULSEEK_PASSWORD`- | Soulseek password - | If using S-oulseek | -| `LIDARR_AP I_KEY` | Lidarr integration | If using L idarr | -| `OPENAI_API_KEY` | AI features | Optional | -| `LASTFM_API_KEY ` | Artist recommendations | Optional | -| `FANART_API_KEY` | Artist images | Optional | +| Variable | Purpose | Required | +| ------------------------- | ------------------------------ | ----------------- | +| `SESSION_SECRET` | Session encryption (32+ chars) | Yes | +| `SETTINGS_ENCRYPTION_KEY` | Encrypts stored credentials | Recommended | +| `SOULSEEK_USERNAME` | Soulseek login | If using Soulseek | +| `SOULSEEK_PASSWORD` | Soulseek password | If using Soulseek | +| `LIDARR_API_KEY` | Lidarr integration | If using Lidarr | +| `OPENAI_API_KEY` | AI features | Optional | +| `LASTFM_API_KEY` | Artist recommendations | Optional | +| `FANART_API_KEY` | Artist images | Optional | -### VPN Configurati on (Optional) +### VPN Configuration (Optional) If using Mullvad VPN for Soulseek: -- Place Wi reGuard config in `ba ckend/mullvad/` (gitignored) -- Never commit VPN cred entials or private keys -- The `*.conf` and `key.txt` patterns are already in .git ignore +- Place WireGuard config in `backend/mullvad/` (gitignored) +- Never commit VPN credentials or private keys +- The `*.conf` and `key.txt` patterns are already in .gitignore ### Generating Secrets -```bas h +```bash # Generate a secure session secret -openss l rand - base64 32 +openssl rand -base64 32 # Generate encryption key openssl rand -hex 32 ``` -### Network - -Sec urity +### Network Security - Lidify is designed for self-hosted LAN use -- For exte rnal access, use a reverse proxy with HTTPS -- C o 
nfigure `ALLOWED_ORIGINS` for your domain +- For external access, use a reverse proxy with HTTPS +- Configure `ALLOWED_ORIGINS` for your domain --- @@ -359,12 +384,12 @@ Lidify works beautifully on its own, but it becomes even more powerful when conn ### Lidarr -Connect Lidify to your Lidarr instance to request and downloa d new music directly from the app. +Connect Lidify to your Lidarr instance to request and download new music directly from the app. **What you get:** - Browse artists and albums you don't own -- Request downloads with a single click +- Request downloads with a single click - Discover Weekly playlists that automatically download new recommendations - Automatic library sync when Lidarr finishes importing diff --git a/backend/package-lock.json b/backend/package-lock.json index c22d6c7..2c9dbcb 100644 --- a/backend/package-lock.json +++ b/backend/package-lock.json @@ -1,12 +1,12 @@ { "name": "lidify-backend", - "version": "1.0.0", + "version": "1.3.0", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "lidify-backend", - "version": "1.0.0", + "version": "1.3.0", "license": "GPL-3.0", "dependencies": { "@bull-board/api": "^6.14.2", @@ -37,6 +37,7 @@ "jsonwebtoken": "^9.0.2", "music-metadata": "^11.10.0", "node-cron": "^4.2.1", + "p-limit": "^7.2.0", "p-queue": "^9.0.0", "podcast-index-api": "^1.1.10", "qrcode": "^1.5.4", @@ -51,6 +52,7 @@ }, "devDependencies": { "@types/bcrypt": "^5.0.2", + "@types/cors": "^2.8.19", "@types/express": "^4.17.21", "@types/express-session": "^1.17.10", "@types/jsonwebtoken": "^9.0.10", @@ -105,9 +107,9 @@ } }, "node_modules/@borewit/text-codec": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/@borewit/text-codec/-/text-codec-0.2.0.tgz", - "integrity": "sha512-X999CKBxGwX8wW+4gFibsbiNdwqmdQEXmUejIWaIqdrHBgS5ARIOOeyiQbHjP9G58xVEPcuvP6VwwH3A0OFTOA==", + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/@borewit/text-codec/-/text-codec-0.2.1.tgz", + "integrity": 
"sha512-k7vvKPbf7J2fZ5klGRD9AeKfUvojuZIQ3BT5u7Jfv+puwXkUBUT5PVyMDfJZpy30CBDXGMgw7fguK/lpOMBvgw==", "license": "MIT", "funding": { "type": "github", @@ -115,25 +117,25 @@ } }, "node_modules/@bull-board/api": { - "version": "6.15.0", - "resolved": "https://registry.npmjs.org/@bull-board/api/-/api-6.15.0.tgz", - "integrity": "sha512-z8qLZ4uv83hZNu+0YnHzhVoWv1grULuYh80FdC2xXLg8M1EwsOZD9cJ5CNpgBFqHb+NVByTmf5FltIvXdOU8tQ==", + "version": "6.16.2", + "resolved": "https://registry.npmjs.org/@bull-board/api/-/api-6.16.2.tgz", + "integrity": "sha512-d3kDf91FeMw/wYp8FOZJjX4hVqZEmomXtYgNRdZc0a5gTR2bmomvpwJtNBinu2lyIRFoX/Rxilz+CZ6xyw3drQ==", "license": "MIT", "dependencies": { "redis-info": "^3.1.0" }, "peerDependencies": { - "@bull-board/ui": "6.15.0" + "@bull-board/ui": "6.16.2" } }, "node_modules/@bull-board/express": { - "version": "6.15.0", - "resolved": "https://registry.npmjs.org/@bull-board/express/-/express-6.15.0.tgz", - "integrity": "sha512-c/nnxr5evLNgqoSSEvTwPb+6WaTB3PN3Bq2oMTBtwCUJlZr+s1UX7gx0wVIYHjeZyUdYR7fX7hhh2cRLO5vqeg==", + "version": "6.16.2", + "resolved": "https://registry.npmjs.org/@bull-board/express/-/express-6.16.2.tgz", + "integrity": "sha512-RYjWmRpixgoRVJf4/iZuwbst4EML8EnL+S2vyIn6uE0iqCXFBV63oEYJAhoEA7P50IrrktVBOU2/qTdsbih18g==", "license": "MIT", "dependencies": { - "@bull-board/api": "6.15.0", - "@bull-board/ui": "6.15.0", + "@bull-board/api": "6.16.2", + "@bull-board/ui": "6.16.2", "ejs": "^3.1.10", "express": "^5.2.0" } @@ -430,12 +432,12 @@ } }, "node_modules/@bull-board/ui": { - "version": "6.15.0", - "resolved": "https://registry.npmjs.org/@bull-board/ui/-/ui-6.15.0.tgz", - "integrity": "sha512-bb/j6VMq2cfPoE/ZiUO7AcYTL0IjtxvKxkYV0zu+i1pc+JEv3ct4BItCII57knJR/YjZKGmdfr079KJFvzXC5A==", + "version": "6.16.2", + "resolved": "https://registry.npmjs.org/@bull-board/ui/-/ui-6.16.2.tgz", + "integrity": "sha512-L8ylgyJqiCrngne9GvX6zqALXnSLhzGBRaPnmO5y7Ev6K9w84EkcfhzcNw4qNH4SJAdcOm3HVf15dBU2Wznbug==", "license": "MIT", "dependencies": { - 
"@bull-board/api": "6.15.0" + "@bull-board/api": "6.16.2" } }, "node_modules/@derhuerst/http-basic": { @@ -454,9 +456,9 @@ } }, "node_modules/@emnapi/runtime": { - "version": "1.7.1", - "resolved": "https://registry.npmjs.org/@emnapi/runtime/-/runtime-1.7.1.tgz", - "integrity": "sha512-PVtJr5CmLwYAU9PZDMITZoR5iAOShYREoR45EyyLrbntV50mdePTgUn4AmOw90Ifcj+x2kRjdzr1HP3RrNiHGA==", + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/@emnapi/runtime/-/runtime-1.8.1.tgz", + "integrity": "sha512-mehfKSMWjjNol8659Z8KxEMrdSJDDot5SXMq00dM8BN4o+CLNXQ0xH2V7EchNHV4RmbZLmmPdEaXZc5H2FXmDg==", "license": "MIT", "optional": true, "dependencies": { @@ -1497,9 +1499,9 @@ } }, "node_modules/@ioredis/commands": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/@ioredis/commands/-/commands-1.4.0.tgz", - "integrity": "sha512-aFT2yemJJo+TZCmieA7qnYGQooOS7QfNmYrzGtsYd3g9j5iDP8AimYYAesf79ohjbLG12XxC4nG5DyEnC88AsQ==", + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/@ioredis/commands/-/commands-1.5.0.tgz", + "integrity": "sha512-eUgLqrMf8nJkZxT24JvVRrQya1vZkQh8BBeYNwGDqa5I0VUi8ACx7uFvAaLxintokpTenkK6DASvo/bvNbBGow==", "license": "MIT" }, "node_modules/@jsdevtools/ono": { @@ -1861,6 +1863,16 @@ "@types/node": "*" } }, + "node_modules/@types/cors": { + "version": "2.8.19", + "resolved": "https://registry.npmjs.org/@types/cors/-/cors-2.8.19.tgz", + "integrity": "sha512-mFNylyeyqN93lfe/9CSxOGREz8cpzAhH+E93xJ4xWQf62V8sQ/24reV2nyzUWM6H6Xji+GGHpkbLe7pVoUEskg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, "node_modules/@types/express": { "version": "4.17.25", "resolved": "https://registry.npmjs.org/@types/express/-/express-4.17.25.tgz", @@ -3018,9 +3030,9 @@ } }, "node_modules/file-type": { - "version": "21.1.1", - "resolved": "https://registry.npmjs.org/file-type/-/file-type-21.1.1.tgz", - "integrity": "sha512-ifJXo8zUqbQ/bLbl9sFoqHNTNWbnPY1COImFfM6CCy7z+E+jC1eY9YfOKkx0fckIg+VljAy2/87T61fp0+eEkg==", + "version": 
"21.3.0", + "resolved": "https://registry.npmjs.org/file-type/-/file-type-21.3.0.tgz", + "integrity": "sha512-8kPJMIGz1Yt/aPEwOsrR97ZyZaD1Iqm8PClb1nYFclUCkBi0Ma5IsYNQzvSFS9ib51lWyIw5mIT9rWzI/xjpzA==", "license": "MIT", "dependencies": { "@tokenizer/inflate": "^0.4.1", @@ -3604,12 +3616,12 @@ "license": "ISC" }, "node_modules/ioredis": { - "version": "5.8.2", - "resolved": "https://registry.npmjs.org/ioredis/-/ioredis-5.8.2.tgz", - "integrity": "sha512-C6uC+kleiIMmjViJINWk80sOQw5lEzse1ZmvD+S/s8p8CWapftSaC+kocGTx6xrbrJ4WmYQGC08ffHLr6ToR6Q==", + "version": "5.9.0", + "resolved": "https://registry.npmjs.org/ioredis/-/ioredis-5.9.0.tgz", + "integrity": "sha512-T3VieIilNumOJCXI9SDgo4NnF6sZkd6XcmPi6qWtw4xqbt8nNz/ZVNiIH1L9puMTSHZh1mUWA4xKa2nWPF4NwQ==", "license": "MIT", "dependencies": { - "@ioredis/commands": "1.4.0", + "@ioredis/commands": "1.5.0", "cluster-key-slot": "^1.1.0", "debug": "^4.3.4", "denque": "^2.1.0", @@ -4096,9 +4108,9 @@ } }, "node_modules/music-metadata": { - "version": "11.10.3", - "resolved": "https://registry.npmjs.org/music-metadata/-/music-metadata-11.10.3.tgz", - "integrity": "sha512-j0g/x4cNNZW6I5gdcPAY+GFkJY9WHTpkFDMBJKQLxJQyvSfQbXm57fTE3haGFFuOzCgtsTd4Plwc49Sn9RacDQ==", + "version": "11.10.5", + "resolved": "https://registry.npmjs.org/music-metadata/-/music-metadata-11.10.5.tgz", + "integrity": "sha512-G0i86zpL7AARmZx8XEkHBVf7rJMQDFfGEFc1C83//rKHGuaK0gwxmNNeo9mjm4g07KUwoT0s0dW7g5QwZhi+qQ==", "funding": [ { "type": "github", @@ -4111,14 +4123,14 @@ ], "license": "MIT", "dependencies": { - "@borewit/text-codec": "^0.2.0", + "@borewit/text-codec": "^0.2.1", "@tokenizer/token": "^0.3.0", "content-type": "^1.0.5", "debug": "^4.4.3", - "file-type": "^21.1.1", + "file-type": "^21.2.0", "media-typer": "^1.1.0", "strtok3": "^10.3.4", - "token-types": "^6.1.1", + "token-types": "^6.1.2", "uint8array-extras": "^1.5.0" }, "engines": { @@ -4315,15 +4327,15 @@ } }, "node_modules/p-limit": { - "version": "2.3.0", - "resolved": 
"https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", - "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-7.2.0.tgz", + "integrity": "sha512-ATHLtwoTNDloHRFFxFJdHnG6n2WUeFjaR8XQMFdKIv0xkXjrER8/iG9iu265jOM95zXHAfv9oTkqhrfbIzosrQ==", "license": "MIT", "dependencies": { - "p-try": "^2.0.0" + "yocto-queue": "^1.2.1" }, "engines": { - "node": ">=6" + "node": ">=20" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" @@ -4341,10 +4353,25 @@ "node": ">=8" } }, + "node_modules/p-locate/node_modules/p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "license": "MIT", + "dependencies": { + "p-try": "^2.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/p-queue": { - "version": "9.0.1", - "resolved": "https://registry.npmjs.org/p-queue/-/p-queue-9.0.1.tgz", - "integrity": "sha512-RhBdVhSwJb7Ocn3e8ULk4NMwBEuOxe+1zcgphUy9c2e5aR/xbEsdVXxHJ3lynw6Qiqu7OINEyHlZkiblEpaq7w==", + "version": "9.1.0", + "resolved": "https://registry.npmjs.org/p-queue/-/p-queue-9.1.0.tgz", + "integrity": "sha512-O/ZPaXuQV29uSLbxWBGGZO1mCQXV2BLIwUr59JUU9SoH76mnYvtms7aafH/isNSNGwuEfP6W/4xD0/TJXxrizw==", "license": "MIT", "dependencies": { "eventemitter3": "^5.0.1", @@ -4516,9 +4543,9 @@ } }, "node_modules/qs": { - "version": "6.14.0", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.14.0.tgz", - "integrity": "sha512-YWWTjgABSKcvs/nWBi9PycY/JiPJqOD4JA6o9Sej2AtvSGarXxKC3OQSk4pAarbdQlKAh5D4FCQkJNkW+GAn3w==", + "version": "6.14.1", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.14.1.tgz", + "integrity": 
"sha512-4EK3+xJl8Ts67nLYNwqw/dsFVnCf+qR7RgXSK9jEEm9unao3njwMDdmsdvoKBKHzxd7tCYz5e5M+SnMjdtXGQQ==", "license": "BSD-3-Clause", "dependencies": { "side-channel": "^1.1.0" @@ -5211,12 +5238,12 @@ } }, "node_modules/token-types": { - "version": "6.1.1", - "resolved": "https://registry.npmjs.org/token-types/-/token-types-6.1.1.tgz", - "integrity": "sha512-kh9LVIWH5CnL63Ipf0jhlBIy0UsrMj/NJDfpsy1SqOXlLKEVyXXYrnFxFT1yOOYVGBSApeVnjPw/sBz5BfEjAQ==", + "version": "6.1.2", + "resolved": "https://registry.npmjs.org/token-types/-/token-types-6.1.2.tgz", + "integrity": "sha512-dRXchy+C0IgK8WPC6xvCHFRIWYUbqqdEIKPaKo/AcTUNzwLTK6AH7RjdLWsEZcAN/TBdtfUw3PYEgPr5VPr6ww==", "license": "MIT", "dependencies": { - "@borewit/text-codec": "^0.1.0", + "@borewit/text-codec": "^0.2.1", "@tokenizer/token": "^0.3.0", "ieee754": "^1.2.1" }, @@ -5228,16 +5255,6 @@ "url": "https://github.com/sponsors/Borewit" } }, - "node_modules/token-types/node_modules/@borewit/text-codec": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/@borewit/text-codec/-/text-codec-0.1.1.tgz", - "integrity": "sha512-5L/uBxmjaCIX5h8Z+uu+kA9BQLkc/Wl06UGR5ajNRxu+/XjonB5i8JpgFMrPj3LXTCPA0pv8yxUvbUi+QthGGA==", - "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/Borewit" - } - }, "node_modules/tr46": { "version": "0.0.3", "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", @@ -5535,6 +5552,18 @@ "node": ">=6" } }, + "node_modules/yocto-queue": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-1.2.2.tgz", + "integrity": "sha512-4LCcse/U2MHZ63HAJVE+v71o7yOdIe4cZ70Wpf8D/IyjDKYQLV5GD46B+hSTjJsvV5PztjvHoU580EftxjDZFQ==", + "license": "MIT", + "engines": { + "node": ">=12.20" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/z-schema": { "version": "5.0.5", "resolved": "https://registry.npmjs.org/z-schema/-/z-schema-5.0.5.tgz", diff --git a/backend/package.json b/backend/package.json 
index fa97158..73a5bca 100644 --- a/backend/package.json +++ b/backend/package.json @@ -1,6 +1,6 @@ { "name": "lidify-backend", - "version": "1.2.0", + "version": "1.3.0", "description": "Lidify backend API server", "license": "GPL-3.0", "repository": { @@ -46,6 +46,7 @@ "jsonwebtoken": "^9.0.2", "music-metadata": "^11.10.0", "node-cron": "^4.2.1", + "p-limit": "^7.2.0", "p-queue": "^9.0.0", "podcast-index-api": "^1.1.10", "qrcode": "^1.5.4", @@ -60,6 +61,7 @@ }, "devDependencies": { "@types/bcrypt": "^5.0.2", + "@types/cors": "^2.8.19", "@types/express": "^4.17.21", "@types/express-session": "^1.17.10", "@types/jsonwebtoken": "^9.0.10", diff --git a/backend/prisma/migrations/20250101000000_rename_soulseek_fallback/migration.sql b/backend/prisma/migrations/20250101000000_rename_soulseek_fallback/migration.sql new file mode 100644 index 0000000..9b64529 --- /dev/null +++ b/backend/prisma/migrations/20250101000000_rename_soulseek_fallback/migration.sql @@ -0,0 +1,10 @@ +-- Rename soulseekFallback to primaryFailureFallback (idempotent) +DO $$ +BEGIN + IF EXISTS ( + SELECT 1 FROM information_schema.columns + WHERE table_name = 'SystemSettings' AND column_name = 'soulseekFallback' + ) THEN + ALTER TABLE "SystemSettings" RENAME COLUMN "soulseekFallback" TO "primaryFailureFallback"; + END IF; +END $$; diff --git a/backend/prisma/migrations/20250102000000_add_user_token_version/migration.sql b/backend/prisma/migrations/20250102000000_add_user_token_version/migration.sql new file mode 100644 index 0000000..41110cd --- /dev/null +++ b/backend/prisma/migrations/20250102000000_add_user_token_version/migration.sql @@ -0,0 +1,11 @@ +-- Add tokenVersion to User table (idempotent) +DO $$ +BEGIN + IF EXISTS (SELECT 1 FROM information_schema.tables WHERE table_name = 'User') + AND NOT EXISTS ( + SELECT 1 FROM information_schema.columns + WHERE table_name = 'User' AND column_name = 'tokenVersion' + ) THEN + ALTER TABLE "User" ADD COLUMN "tokenVersion" INTEGER NOT NULL DEFAULT 0; + 
END IF; +END $$; diff --git a/backend/prisma/migrations/20250102000001_add_downloadjob_targetmbid_index/migration.sql b/backend/prisma/migrations/20250102000001_add_downloadjob_targetmbid_index/migration.sql new file mode 100644 index 0000000..ed83b6d --- /dev/null +++ b/backend/prisma/migrations/20250102000001_add_downloadjob_targetmbid_index/migration.sql @@ -0,0 +1,11 @@ +-- Create targetMbid index on DownloadJob (idempotent) +DO $$ +BEGIN + IF EXISTS (SELECT 1 FROM information_schema.tables WHERE table_name = 'DownloadJob') + AND NOT EXISTS ( + SELECT 1 FROM pg_indexes + WHERE tablename = 'DownloadJob' AND indexname = 'DownloadJob_targetMbid_idx' + ) THEN + CREATE INDEX "DownloadJob_targetMbid_idx" ON "DownloadJob"("targetMbid"); + END IF; +END $$; diff --git a/backend/prisma/migrations/20251130000000_init/migration.sql b/backend/prisma/migrations/20251130000000_init/migration.sql index 27f848c..2c744c0 100644 --- a/backend/prisma/migrations/20251130000000_init/migration.sql +++ b/backend/prisma/migrations/20251130000000_init/migration.sql @@ -19,6 +19,7 @@ CREATE TABLE "User" ( "twoFactorSecret" TEXT, "twoFactorRecoveryCodes" TEXT, "moodMixParams" JSONB, + "tokenVersion" INTEGER NOT NULL DEFAULT 0, "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, CONSTRAINT "User_pkey" PRIMARY KEY ("id") @@ -78,7 +79,7 @@ CREATE TABLE "SystemSettings" ( "downloadRetryAttempts" INTEGER NOT NULL DEFAULT 3, "transcodeCacheMaxGb" INTEGER NOT NULL DEFAULT 10, "downloadSource" TEXT NOT NULL DEFAULT 'soulseek', - "soulseekFallback" TEXT NOT NULL DEFAULT 'none', + "primaryFailureFallback" TEXT NOT NULL DEFAULT 'none', "updatedAt" TIMESTAMP(3) NOT NULL, "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, @@ -826,6 +827,9 @@ CREATE INDEX "DownloadJob_lidarrRef_idx" ON "DownloadJob"("lidarrRef"); -- CreateIndex CREATE INDEX "DownloadJob_artistMbid_idx" ON "DownloadJob"("artistMbid"); +-- CreateIndex +CREATE INDEX "DownloadJob_targetMbid_idx" ON 
"DownloadJob"("targetMbid"); + -- CreateIndex CREATE INDEX "ListeningState_userId_idx" ON "ListeningState"("userId"); diff --git a/backend/prisma/migrations/20251229004706_add_enrichment_concurrency/migration.sql b/backend/prisma/migrations/20251229004706_add_enrichment_concurrency/migration.sql new file mode 100644 index 0000000..ca28e8a --- /dev/null +++ b/backend/prisma/migrations/20251229004706_add_enrichment_concurrency/migration.sql @@ -0,0 +1,2 @@ +-- AlterTable +ALTER TABLE "SystemSettings" ADD COLUMN "enrichmentConcurrency" INTEGER NOT NULL DEFAULT 1; diff --git a/backend/prisma/migrations/20251229043907_add_metadata_overrides/migration.sql b/backend/prisma/migrations/20251229043907_add_metadata_overrides/migration.sql new file mode 100644 index 0000000..ca8f9c8 --- /dev/null +++ b/backend/prisma/migrations/20251229043907_add_metadata_overrides/migration.sql @@ -0,0 +1,27 @@ +-- AlterTable +ALTER TABLE "Album" ADD COLUMN "displayTitle" TEXT, +ADD COLUMN "displayYear" INTEGER, +ADD COLUMN "hasUserOverrides" BOOLEAN NOT NULL DEFAULT false, +ADD COLUMN "userCoverUrl" TEXT, +ADD COLUMN "userGenres" JSONB; + +-- AlterTable +ALTER TABLE "Artist" ADD COLUMN "displayName" TEXT, +ADD COLUMN "hasUserOverrides" BOOLEAN NOT NULL DEFAULT false, +ADD COLUMN "userGenres" JSONB, +ADD COLUMN "userHeroUrl" TEXT, +ADD COLUMN "userSummary" TEXT; + +-- AlterTable +ALTER TABLE "Track" ADD COLUMN "displayTitle" TEXT, +ADD COLUMN "displayTrackNo" INTEGER, +ADD COLUMN "hasUserOverrides" BOOLEAN NOT NULL DEFAULT false; + +-- CreateIndex +CREATE INDEX "Album_hasUserOverrides_idx" ON "Album"("hasUserOverrides"); + +-- CreateIndex +CREATE INDEX "Artist_hasUserOverrides_idx" ON "Artist"("hasUserOverrides"); + +-- CreateIndex +CREATE INDEX "Track_hasUserOverrides_idx" ON "Track"("hasUserOverrides"); diff --git a/backend/prisma/migrations/20251230000000_add_podcast_audiobook_search_vectors/migration.sql 
b/backend/prisma/migrations/20251230000000_add_podcast_audiobook_search_vectors/migration.sql new file mode 100644 index 0000000..1b13944 --- /dev/null +++ b/backend/prisma/migrations/20251230000000_add_podcast_audiobook_search_vectors/migration.sql @@ -0,0 +1,128 @@ +-- Migration: Add search vector triggers for podcasts and audiobooks +-- This migration creates PostgreSQL functions and triggers to automatically +-- populate and maintain search vectors for podcast and audiobook content + +-- ============================================================================ +-- PODCAST SEARCH VECTOR FUNCTION +-- ============================================================================ +-- Function to generate Podcast search vector from title, author, and description +CREATE OR REPLACE FUNCTION podcast_search_vector_trigger() RETURNS trigger AS $$ +BEGIN + -- Combine title, author, and description into search vector + -- Using setweight: title (A), author (B), description (C) + NEW."searchVector" := + setweight(to_tsvector('english', COALESCE(NEW.title, '')), 'A') || + setweight(to_tsvector('english', COALESCE(NEW.author, '')), 'B') || + setweight(to_tsvector('english', COALESCE(NEW.description, '')), 'C'); + + RETURN NEW; +END +$$ LANGUAGE plpgsql; + +-- Create trigger to auto-update Podcast search vector +DROP TRIGGER IF EXISTS podcast_search_vector_update ON "Podcast"; +CREATE TRIGGER podcast_search_vector_update + BEFORE INSERT OR UPDATE OF title, author, description + ON "Podcast" + FOR EACH ROW + EXECUTE FUNCTION podcast_search_vector_trigger(); + +-- ============================================================================ +-- PODCAST EPISODE SEARCH VECTOR FUNCTION +-- ============================================================================ +-- Function to generate PodcastEpisode search vector from title and description +CREATE OR REPLACE FUNCTION podcast_episode_search_vector_trigger() RETURNS trigger AS $$ +BEGIN + -- Combine title and description into 
search vector + -- Using setweight: title (A), description (B) + NEW."searchVector" := + setweight(to_tsvector('english', COALESCE(NEW.title, '')), 'A') || + setweight(to_tsvector('english', COALESCE(NEW.description, '')), 'B'); + + RETURN NEW; +END +$$ LANGUAGE plpgsql; + +-- Create trigger to auto-update PodcastEpisode search vector +DROP TRIGGER IF EXISTS podcast_episode_search_vector_update ON "PodcastEpisode"; +CREATE TRIGGER podcast_episode_search_vector_update + BEFORE INSERT OR UPDATE OF title, description + ON "PodcastEpisode" + FOR EACH ROW + EXECUTE FUNCTION podcast_episode_search_vector_trigger(); + +-- ============================================================================ +-- AUDIOBOOK SEARCH VECTOR FUNCTION +-- ============================================================================ +-- Function to generate Audiobook search vector from title, author, narrator, series, and description +CREATE OR REPLACE FUNCTION audiobook_search_vector_trigger() RETURNS trigger AS $$ +BEGIN + -- Combine title, author/narrator/series, and description into search vector + -- Using setweight: title (A), author/narrator/series (B), description (C) + NEW."searchVector" := + setweight(to_tsvector('english', COALESCE(NEW.title, '')), 'A') || + setweight(to_tsvector('english', COALESCE(NEW.author, '')), 'B') || + setweight(to_tsvector('english', COALESCE(NEW.narrator, '')), 'B') || + setweight(to_tsvector('english', COALESCE(NEW.series, '')), 'B') || + setweight(to_tsvector('english', COALESCE(NEW.description, '')), 'C'); + + RETURN NEW; +END +$$ LANGUAGE plpgsql; + +-- Create trigger to auto-update Audiobook search vector +DROP TRIGGER IF EXISTS audiobook_search_vector_update ON "Audiobook"; +CREATE TRIGGER audiobook_search_vector_update + BEFORE INSERT OR UPDATE OF title, author, narrator, series, description + ON "Audiobook" + FOR EACH ROW + EXECUTE FUNCTION audiobook_search_vector_trigger(); + +-- 
============================================================================ +-- ADD SEARCH VECTOR COLUMNS +-- ============================================================================ +-- Add searchVector column to Podcast table +ALTER TABLE "Podcast" ADD COLUMN IF NOT EXISTS "searchVector" tsvector; + +-- Add searchVector column to PodcastEpisode table +ALTER TABLE "PodcastEpisode" ADD COLUMN IF NOT EXISTS "searchVector" tsvector; + +-- Add searchVector column to Audiobook table +ALTER TABLE "Audiobook" ADD COLUMN IF NOT EXISTS "searchVector" tsvector; + +-- ============================================================================ +-- CREATE GIN INDEXES +-- ============================================================================ +-- Create GIN index on Podcast search vector +CREATE INDEX IF NOT EXISTS "Podcast_searchVector_idx" ON "Podcast" USING GIN ("searchVector"); + +-- Create GIN index on PodcastEpisode search vector +CREATE INDEX IF NOT EXISTS "PodcastEpisode_searchVector_idx" ON "PodcastEpisode" USING GIN ("searchVector"); + +-- Create GIN index on Audiobook search vector +CREATE INDEX IF NOT EXISTS "Audiobook_searchVector_idx" ON "Audiobook" USING GIN ("searchVector"); + +-- ============================================================================ +-- POPULATE EXISTING RECORDS +-- ============================================================================ +-- Update all existing Podcasts to populate their search vectors +UPDATE "Podcast" +SET "searchVector" = + setweight(to_tsvector('english', COALESCE(title, '')), 'A') || + setweight(to_tsvector('english', COALESCE(author, '')), 'B') || + setweight(to_tsvector('english', COALESCE(description, '')), 'C'); + +-- Update all existing PodcastEpisodes to populate their search vectors +UPDATE "PodcastEpisode" +SET "searchVector" = + setweight(to_tsvector('english', COALESCE(title, '')), 'A') || + setweight(to_tsvector('english', COALESCE(description, '')), 'B'); + +-- Update all existing 
Audiobooks to populate their search vectors +UPDATE "Audiobook" +SET "searchVector" = + setweight(to_tsvector('english', COALESCE(title, '')), 'A') || + setweight(to_tsvector('english', COALESCE(author, '')), 'B') || + setweight(to_tsvector('english', COALESCE(narrator, '')), 'B') || + setweight(to_tsvector('english', COALESCE(series, '')), 'B') || + setweight(to_tsvector('english', COALESCE(description, '')), 'C'); diff --git a/backend/prisma/migrations/20251230234224_add_enrichment_and_overrides/migration.sql b/backend/prisma/migrations/20251230234224_add_enrichment_and_overrides/migration.sql new file mode 100644 index 0000000..8708a67 --- /dev/null +++ b/backend/prisma/migrations/20251230234224_add_enrichment_and_overrides/migration.sql @@ -0,0 +1,32 @@ +-- CreateTable +CREATE TABLE "EnrichmentFailure" ( + "id" TEXT NOT NULL, + "entityType" TEXT NOT NULL, + "entityId" TEXT NOT NULL, + "entityName" TEXT, + "errorMessage" TEXT, + "errorCode" TEXT, + "retryCount" INTEGER NOT NULL DEFAULT 0, + "maxRetries" INTEGER NOT NULL DEFAULT 3, + "firstFailedAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, + "lastFailedAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, + "skipped" BOOLEAN NOT NULL DEFAULT false, + "skippedAt" TIMESTAMP(3), + "resolved" BOOLEAN NOT NULL DEFAULT false, + "resolvedAt" TIMESTAMP(3), + "metadata" JSONB, + + CONSTRAINT "EnrichmentFailure_pkey" PRIMARY KEY ("id") +); + +-- CreateIndex +CREATE INDEX "EnrichmentFailure_entityType_resolved_idx" ON "EnrichmentFailure"("entityType", "resolved"); + +-- CreateIndex +CREATE INDEX "EnrichmentFailure_skipped_idx" ON "EnrichmentFailure"("skipped"); + +-- CreateIndex +CREATE INDEX "EnrichmentFailure_lastFailedAt_idx" ON "EnrichmentFailure"("lastFailedAt"); + +-- CreateIndex +CREATE UNIQUE INDEX "EnrichmentFailure_entityType_entityId_key" ON "EnrichmentFailure"("entityType", "entityId"); diff --git a/backend/prisma/migrations/20251231041041_add_original_year_to_album/migration.sql 
b/backend/prisma/migrations/20251231041041_add_original_year_to_album/migration.sql new file mode 100644 index 0000000..709b487 --- /dev/null +++ b/backend/prisma/migrations/20251231041041_add_original_year_to_album/migration.sql @@ -0,0 +1,2 @@ +-- AlterTable +ALTER TABLE "Album" ADD COLUMN "originalYear" INTEGER; diff --git a/backend/prisma/migrations/20260101152925_add_lidarr_webhook_secret/migration.sql b/backend/prisma/migrations/20260101152925_add_lidarr_webhook_secret/migration.sql new file mode 100644 index 0000000..6c998e2 --- /dev/null +++ b/backend/prisma/migrations/20260101152925_add_lidarr_webhook_secret/migration.sql @@ -0,0 +1,2 @@ +-- AlterTable +ALTER TABLE "SystemSettings" ADD COLUMN "lidarrWebhookSecret" TEXT; diff --git a/backend/prisma/migrations/20260102142537_add_analysis_started_at/migration.sql b/backend/prisma/migrations/20260102142537_add_analysis_started_at/migration.sql new file mode 100644 index 0000000..49d1c73 --- /dev/null +++ b/backend/prisma/migrations/20260102142537_add_analysis_started_at/migration.sql @@ -0,0 +1,2 @@ +-- AlterTable +ALTER TABLE "Track" ADD COLUMN "analysisStartedAt" TIMESTAMP(3); diff --git a/backend/prisma/migrations/20260102150000_add_audio_analyzer_workers/migration.sql b/backend/prisma/migrations/20260102150000_add_audio_analyzer_workers/migration.sql new file mode 100644 index 0000000..916d7a8 --- /dev/null +++ b/backend/prisma/migrations/20260102150000_add_audio_analyzer_workers/migration.sql @@ -0,0 +1,2 @@ +-- AlterTable +ALTER TABLE "SystemSettings" ADD COLUMN "audioAnalyzerWorkers" INTEGER NOT NULL DEFAULT 2; diff --git a/backend/prisma/migrations/20260103045951_add_lastfm_api_key/migration.sql b/backend/prisma/migrations/20260103045951_add_lastfm_api_key/migration.sql new file mode 100644 index 0000000..93a2b0e --- /dev/null +++ b/backend/prisma/migrations/20260103045951_add_lastfm_api_key/migration.sql @@ -0,0 +1,2 @@ +-- AlterTable +ALTER TABLE "SystemSettings" ADD COLUMN "lastfmApiKey" TEXT; diff 
--git a/backend/prisma/migrations/20260104000000_add_soulseek_concurrent_downloads/migration.sql b/backend/prisma/migrations/20260104000000_add_soulseek_concurrent_downloads/migration.sql new file mode 100644 index 0000000..a5cdb3c --- /dev/null +++ b/backend/prisma/migrations/20260104000000_add_soulseek_concurrent_downloads/migration.sql @@ -0,0 +1,2 @@ +-- AlterTable +ALTER TABLE "SystemSettings" ADD COLUMN "soulseekConcurrentDownloads" INTEGER NOT NULL DEFAULT 4; diff --git a/backend/prisma/schema.prisma b/backend/prisma/schema.prisma index 04c10f6..0bc9055 100644 --- a/backend/prisma/schema.prisma +++ b/backend/prisma/schema.prisma @@ -18,6 +18,7 @@ model User { twoFactorSecret String? // TOTP secret (encrypted) twoFactorRecoveryCodes String? // Recovery codes (encrypted, comma-separated hashed codes) moodMixParams Json? // Saved mood mix parameters for "Your Mood Mix" + tokenVersion Int @default(0) // Incremented on password change to invalidate tokens createdAt DateTime @default(now()) plays Play[] @@ -77,9 +78,10 @@ model SystemSettings { // === Download Services === // Lidarr - lidarrEnabled Boolean @default(true) - lidarrUrl String? @default("http://localhost:8686") - lidarrApiKey String? // Encrypted + lidarrEnabled Boolean @default(true) + lidarrUrl String? @default("http://localhost:8686") + lidarrApiKey String? // Encrypted + lidarrWebhookSecret String? // Encrypted - Shared secret for webhook verification // === AI Services === // OpenAI (for future AI features) @@ -92,6 +94,9 @@ model SystemSettings { fanartEnabled Boolean @default(false) fanartApiKey String? // Encrypted + // Last.fm (optional user override - app ships with default key) + lastfmApiKey String? 
// Encrypted + // === Media Services === // Audiobookshelf audiobookshelfEnabled Boolean @default(false) @@ -118,12 +123,15 @@ model SystemSettings { maxConcurrentDownloads Int @default(3) downloadRetryAttempts Int @default(3) transcodeCacheMaxGb Int @default(10) // Transcode cache size limit in GB + enrichmentConcurrency Int @default(1) // 1-5, number of parallel enrichment workers + audioAnalyzerWorkers Int @default(2) // 1-8, number of parallel audio analysis workers + soulseekConcurrentDownloads Int @default(4) // 1-10, concurrent Soulseek downloads // === Download Preferences === // Primary download source: "soulseek" (per-track) or "lidarr" (full albums) - downloadSource String @default("soulseek") - // When soulseek is primary and fails: "none" (skip) or "lidarr" (download full album) - soulseekFallback String @default("none") + downloadSource String @default("soulseek") + // Fallback when primary source fails: "none" (skip), "lidarr" (full album), or "soulseek" (track-based) + primaryFailureFallback String @default("none") updatedAt DateTime @updatedAt createdAt DateTime @default(now()) @@ -143,6 +151,13 @@ model Artist { enrichmentStatus String @default("pending") // pending, enriching, completed, failed searchVector Unsupported("tsvector")? + // User overrides (optional, takes display precedence) + displayName String? // User-provided display name + userSummary String? @db.Text // User-provided bio + userHeroUrl String? // User-uploaded/linked image + userGenres Json? 
// User-modified genres (array of strings) + hasUserOverrides Boolean @default(false) // Quick check flag + albums Album[] similarFrom SimilarArtist[] @relation("FromArtist") similarTo SimilarArtist[] @relation("ToArtist") @@ -151,6 +166,7 @@ model Artist { @@index([name]) @@index([normalizedName]) @@index([searchVector], type: Gin) + @@index([hasUserOverrides]) } model Album { @@ -158,7 +174,8 @@ model Album { rgMbid String @unique // release group MBID artistId String title String - year Int? + year Int? // File metadata date (may be remaster) + originalYear Int? // Original release date from MusicBrainz coverUrl String? primaryType String // Album, EP, Single, Live, Compilation label String? // Record label (from MusicBrainz) @@ -167,6 +184,13 @@ model Album { location AlbumLocation @default(LIBRARY) // LIBRARY or DISCOVER searchVector Unsupported("tsvector")? + // User overrides (optional, takes display precedence) + displayTitle String? // User-provided display title + displayYear Int? // User-provided year + userCoverUrl String? // User-uploaded/linked cover + userGenres Json? // User-modified genres (array of strings) + hasUserOverrides Boolean @default(false) // Quick check flag + artist Artist @relation(fields: [artistId], references: [id], onDelete: Cascade) tracks Track[] @@ -174,6 +198,7 @@ model Album { @@index([location]) @@index([title]) @@index([searchVector], type: Gin) + @@index([hasUserOverrides]) } model Track { @@ -190,6 +215,11 @@ model Track { fileModified DateTime // mtime for change detection fileSize Int // File size in bytes + // User overrides (optional, takes display precedence) + displayTitle String? // User-provided display title + displayTrackNo Int? // User-provided track number + hasUserOverrides Boolean @default(false) // Quick check flag + // === Audio Analysis (Essentia) === // Rhythm bpm Float? 
// Beats per minute (e.g., 120.5) @@ -235,13 +265,14 @@ model Track { lastfmTags String[] // ["chill", "workout", "sad", "90s"] // Analysis Metadata - analysisStatus String @default("pending") // pending, processing, completed, failed - analysisVersion String? // Essentia version used - analysisMode String? // 'standard' or 'enhanced' - analyzedAt DateTime? - analysisError String? // Error message if failed - analysisRetryCount Int @default(0) // Number of retry attempts - updatedAt DateTime @updatedAt + analysisStatus String @default("pending") // pending, processing, completed, failed + analysisStartedAt DateTime? // When processing began (for timeout detection) + analysisVersion String? // Essentia version used + analysisMode String? // 'standard' or 'enhanced' + analyzedAt DateTime? + analysisError String? // Error message if failed + analysisRetryCount Int @default(0) // Number of retry attempts + updatedAt DateTime @updatedAt album Album @relation(fields: [albumId], references: [id], onDelete: Cascade) plays Play[] @@ -272,6 +303,7 @@ model Track { @@index([arousal]) @@index([acousticness]) @@index([instrumentalness]) + @@index([hasUserOverrides]) } // Transcoded file cache for audio streaming @@ -479,6 +511,7 @@ model DownloadJob { @@index([startedAt]) @@index([lidarrRef]) @@index([artistMbid]) + @@index([targetMbid]) } model ListeningState { @@ -640,6 +673,9 @@ model Audiobook { audioUrl String // Audiobookshelf streaming URL libraryId String? // Audiobookshelf library ID + // Full-text search + searchVector Unsupported("tsvector")? 
+ // Timestamps createdAt DateTime @default(now()) updatedAt DateTime @updatedAt @@ -649,6 +685,7 @@ model Audiobook { @@index([author]) @@index([series]) @@index([lastSyncedAt]) + @@index([searchVector], type: Gin) } model PodcastRecommendation { @@ -676,46 +713,49 @@ model PodcastRecommendation { // ============================================ model Podcast { - id String @id @default(cuid()) - feedUrl String @unique + id String @id @default(cuid()) + feedUrl String @unique title String author String? - description String? @db.Text - imageUrl String? // Original feed image URL - localCoverPath String? // Local cached cover image path - itunesId String? @unique + description String? @db.Text + imageUrl String? // Original feed image URL + localCoverPath String? // Local cached cover image path + itunesId String? @unique language String? - explicit Boolean @default(false) - episodeCount Int @default(0) - lastRefreshed DateTime @default(now()) - refreshInterval Int @default(3600) // seconds (1 hour default) - autoRefresh Boolean @default(true) - createdAt DateTime @default(now()) - updatedAt DateTime @updatedAt + explicit Boolean @default(false) + episodeCount Int @default(0) + lastRefreshed DateTime @default(now()) + refreshInterval Int @default(3600) // seconds (1 hour default) + autoRefresh Boolean @default(true) + createdAt DateTime @default(now()) + updatedAt DateTime @updatedAt + searchVector Unsupported("tsvector")? episodes PodcastEpisode[] subscriptions PodcastSubscription[] @@index([itunesId]) @@index([lastRefreshed]) + @@index([searchVector], type: Gin) } model PodcastEpisode { - id String @id @default(cuid()) + id String @id @default(cuid()) podcastId String - guid String // RSS GUID (unique per feed) + guid String // RSS GUID (unique per feed) title String - description String? @db.Text - audioUrl String // Direct MP3/audio URL from RSS - duration Int @default(0) // seconds + description String? 
@db.Text + audioUrl String // Direct MP3/audio URL from RSS + duration Int @default(0) // seconds publishedAt DateTime episodeNumber Int? season Int? - imageUrl String? // Episode-specific image URL - localCoverPath String? // Local cached episode cover - fileSize Int? // bytes - mimeType String? @default("audio/mpeg") - createdAt DateTime @default(now()) + imageUrl String? // Episode-specific image URL + localCoverPath String? // Local cached episode cover + fileSize Int? // bytes + mimeType String? @default("audio/mpeg") + createdAt DateTime @default(now()) + searchVector Unsupported("tsvector")? podcast Podcast @relation(fields: [podcastId], references: [id], onDelete: Cascade) progress PodcastProgress[] @@ -723,6 +763,7 @@ model PodcastEpisode { @@unique([podcastId, guid]) @@index([podcastId, publishedAt]) + @@index([searchVector], type: Gin) } // User podcast subscriptions @@ -976,3 +1017,30 @@ model Notification { @@index([userId, read]) @@index([createdAt]) } + +// ============================================ +// Enrichment Failure Tracking +// ============================================ + +model EnrichmentFailure { + id String @id @default(cuid()) + entityType String // artist, track, audio + entityId String // Artist/Track ID + entityName String? // Display name + errorMessage String? // Human-readable error + errorCode String? // Machine-readable code + retryCount Int @default(0) + maxRetries Int @default(3) + firstFailedAt DateTime @default(now()) + lastFailedAt DateTime @default(now()) + skipped Boolean @default(false) + skippedAt DateTime? + resolved Boolean @default(false) + resolvedAt DateTime? + metadata Json? // Additional context (filePath, etc.) 
+ + @@unique([entityType, entityId]) + @@index([entityType, resolved]) + @@index([skipped]) + @@index([lastFailedAt]) +} diff --git a/backend/scripts/backfill-original-year.ts b/backend/scripts/backfill-original-year.ts new file mode 100644 index 0000000..a6bb877 --- /dev/null +++ b/backend/scripts/backfill-original-year.ts @@ -0,0 +1,146 @@ +#!/usr/bin/env ts-node +/** + * Backfill Script: Populate originalYear for existing albums + * + * This script populates the new originalYear field for albums that don't have it yet. + * + * Strategy: + * 1. For albums already enriched with MusicBrainz data, copy year to originalYear + * (since enrichment overwrites year with the original release date) + * 2. Skip temporary albums (temp-* MBIDs) + * + * Usage: + * npx ts-node scripts/backfill-original-year.ts [--dry-run] + * + * Options: + * --dry-run Show what would be updated without making changes + */ + +import { PrismaClient } from "@prisma/client"; + +const prisma = new PrismaClient(); + +async function backfillOriginalYear(dryRun: boolean = false) { + console.log("=== Backfill originalYear Script ===\n"); + console.log( + `Mode: ${ + dryRun ? "DRY RUN (no changes)" : "LIVE (will update database)" + }\n` + ); + + try { + // Find albums that need backfilling + const albumsToBackfill = await prisma.album.findMany({ + where: { + originalYear: null, + year: { not: null }, // Only albums that have a year value + rgMbid: { not: { startsWith: "temp-" } }, // Skip temporary albums + }, + select: { + id: true, + rgMbid: true, + title: true, + year: true, + originalYear: true, + artist: { + select: { + name: true, + }, + }, + }, + }); + + console.log(`Found ${albumsToBackfill.length} albums to backfill\n`); + + if (albumsToBackfill.length === 0) { + console.log("✓ No albums need backfilling. 
All done!"); + return; + } + + // Show sample of albums to be updated + console.log("Sample of albums to be updated:"); + albumsToBackfill.slice(0, 5).forEach((album, idx) => { + console.log( + ` ${idx + 1}. "${album.title}" by ${album.artist.name}` + ); + console.log( + ` Current: year=${album.year}, originalYear=${album.originalYear}` + ); + console.log(` Will set: originalYear=${album.year}\n`); + }); + + if (albumsToBackfill.length > 5) { + console.log( + ` ... and ${albumsToBackfill.length - 5} more albums\n` + ); + } + + if (dryRun) { + console.log( + "DRY RUN: No changes made. Remove --dry-run to apply updates." + ); + return; + } + + // Confirm before proceeding in live mode + console.log( + `Proceeding with backfill of ${albumsToBackfill.length} albums...\n` + ); + + // Process in batches to avoid overwhelming the database + const BATCH_SIZE = 100; + let processed = 0; + let updated = 0; + + for (let i = 0; i < albumsToBackfill.length; i += BATCH_SIZE) { + const batch = albumsToBackfill.slice(i, i + BATCH_SIZE); + + // Update each album in the batch + const updatePromises = batch.map((album) => + prisma.album.update({ + where: { id: album.id }, + data: { originalYear: album.year }, + }) + ); + + await Promise.all(updatePromises); + + processed += batch.length; + updated += batch.length; + + const progress = ( + (processed / albumsToBackfill.length) * + 100 + ).toFixed(1); + console.log( + `Progress: ${processed}/${albumsToBackfill.length} (${progress}%) albums updated` + ); + } + + console.log(`\n✓ Backfill complete!`); + console.log(` - Total albums updated: ${updated}`); + console.log(` - Field populated: originalYear`); + console.log( + `\nNote: Future albums will have originalYear populated automatically during enrichment.` + ); + } catch (error) { + console.error("\n✗ Error during backfill:", error); + throw error; + } finally { + await prisma.$disconnect(); + } +} + +// Parse command line arguments +const args = process.argv.slice(2); +const dryRun = 
args.includes("--dry-run"); + +// Run the backfill +backfillOriginalYear(dryRun) + .then(() => { + process.exit(0); + }) + .catch((error) => { + console.error(error); + process.exit(1); + }); diff --git a/backend/src/config.ts b/backend/src/config.ts index e77e590..6807094 100644 --- a/backend/src/config.ts +++ b/backend/src/config.ts @@ -1,6 +1,8 @@ import dotenv from "dotenv"; import { z } from "zod"; +import * as fs from "fs"; import { validateMusicConfig, MusicConfig } from "./utils/configValidator"; +import { logger } from "./utils/logger"; dotenv.config(); @@ -18,14 +20,14 @@ const envSchema = z.object({ try { envSchema.parse(process.env); - console.log("Environment variables validated"); + logger.debug("Environment variables validated"); } catch (error) { if (error instanceof z.ZodError) { - console.error(" Environment validation failed:"); + logger.error(" Environment validation failed:"); error.errors.forEach((err) => { - console.error(` - ${err.path.join(".")}: ${err.message}`); + logger.error(` - ${err.path.join(".")}: ${err.message}`); }); - console.error( + logger.error( "\n Please check your .env file and ensure all required variables are set." 
); process.exit(1); @@ -47,10 +49,10 @@ let musicConfig: MusicConfig = { export async function initializeMusicConfig() { try { musicConfig = await validateMusicConfig(); - console.log("Music configuration initialized"); + logger.debug("Music configuration initialized"); } catch (err: any) { - console.error(" Configuration validation failed:", err.message); - console.warn(" Using default/environment configuration"); + logger.error(" Configuration validation failed:", err.message); + logger.warn(" Using default/environment configuration"); // Don't exit process - allow app to start for other features // Music features will fail gracefully if config is invalid } @@ -80,11 +82,9 @@ export const config = { } : undefined, - // Last.fm - ships with default app key, users can override in settings + // Last.fm - ships with default app key, user can optionally override lastfm: { - // Default application API key (free tier, for public use) - // Users can override this in System Settings with their own key - apiKey: process.env.LASTFM_API_KEY || "c1797de6bf0b7e401b623118120cd9e1", + apiKey: process.env.LASTFM_API_KEY || "95fe0eaa9875db7bb8539b2c738b4dcd", }, // OpenAI - reads from database diff --git a/backend/src/index.ts b/backend/src/index.ts index c44602c..e6a0859 100644 --- a/backend/src/index.ts +++ b/backend/src/index.ts @@ -6,6 +6,7 @@ import helmet from "helmet"; import { config } from "./config"; import { redisClient } from "./utils/redis"; import { prisma } from "./utils/db"; +import { logger } from "./utils/logger"; import authRoutes from "./routes/auth"; import onboardingRoutes from "./routes/onboarding"; @@ -38,6 +39,7 @@ import analysisRoutes from "./routes/analysis"; import releasesRoutes from "./routes/releases"; import { dataCacheService } from "./services/dataCache"; import { errorHandler } from "./middleware/errorHandler"; +import { requireAuth, requireAdmin } from "./middleware/auth"; import { authLimiter, apiLimiter, @@ -80,7 +82,7 @@ app.use( } else { // 
For self-hosted: allow anyway but log it // Users shouldn't have to configure CORS for their own app - console.log( + logger.debug( `[CORS] Origin ${origin} not in allowlist, allowing anyway (self-hosted)` ); callback(null, true); @@ -111,10 +113,8 @@ app.use( proxy: true, // Trust the reverse proxy cookie: { httpOnly: true, - // For self-hosted apps: allow HTTP access (common for LAN deployments) - // If behind HTTPS reverse proxy, the proxy should handle security - secure: false, - sameSite: "lax", + secure: process.env.NODE_ENV === "production", + sameSite: "strict", maxAge: 1000 * 60 * 60 * 24 * 7, // 7 days }, }) @@ -167,8 +167,15 @@ app.get("/api/health", (req, res) => { }); // Swagger API Documentation +// In production: require auth unless DOCS_PUBLIC=true +// In development: always public for easier testing +const docsMiddleware = config.nodeEnv === "production" && process.env.DOCS_PUBLIC !== "true" + ? [requireAuth] + : []; + app.use( "/api/docs", + ...docsMiddleware, swaggerUi.serve, swaggerUi.setup(swaggerSpec, { customCss: ".swagger-ui .topbar { display: none }", @@ -177,15 +184,60 @@ app.use( ); // Serve raw OpenAPI spec -app.get("/api/docs.json", (req, res) => { +app.get("/api/docs.json", ...docsMiddleware, (req, res) => { res.json(swaggerSpec); }); // Error handler app.use(errorHandler); +// Health check functions +async function checkPostgresConnection() { + try { + await prisma.$queryRaw`SELECT 1`; + logger.debug("✓ PostgreSQL connection verified"); + } catch (error) { + logger.error("✗ PostgreSQL connection failed:", { + error: error instanceof Error ? error.message : String(error), + databaseUrl: config.databaseUrl?.replace(/:[^:@]+@/, ':***@') // Hide password + }); + logger.error("Unable to connect to PostgreSQL. Please ensure:"); + logger.error(" 1. PostgreSQL is running on the correct port (default: 5433)"); + logger.error(" 2. DATABASE_URL in .env is correct"); + logger.error(" 3. 
Database credentials are valid"); + process.exit(1); + } +} + +async function checkRedisConnection() { + try { + // Check if Redis client is actually connected + // The redis client has automatic reconnection, so we need to check status first + if (!redisClient.isReady) { + throw new Error("Redis client is not ready - connection failed or still connecting"); + } + + // If connected, verify with ping + await redisClient.ping(); + logger.debug("✓ Redis connection verified"); + } catch (error) { + logger.error("✗ Redis connection failed:", { + error: error instanceof Error ? error.message : String(error), + redisUrl: config.redisUrl?.replace(/:[^:@]+@/, ':***@') // Hide password if any + }); + logger.error("Unable to connect to Redis. Please ensure:"); + logger.error(" 1. Redis is running on the correct port (default: 6380)"); + logger.error(" 2. REDIS_URL in .env is correct"); + process.exit(1); + } +} + app.listen(config.port, "0.0.0.0", async () => { - console.log( + // Verify database connections before proceeding + await checkPostgresConnection(); + await checkRedisConnection(); + + logger.debug( `Lidify API running on port ${config.port} (accessible on all network interfaces)` ); @@ -224,8 +276,8 @@ app.listen(config.port, "0.0.0.0", async () => { serverAdapter, }); - app.use("/api/admin/queues", serverAdapter.getRouter()); - console.log("Bull Board dashboard available at /api/admin/queues"); + app.use("/api/admin/queues", requireAuth, requireAdmin, serverAdapter.getRouter()); + logger.debug("Bull Board dashboard available at /api/admin/queues (admin-only)"); // Note: Native library scanning is now triggered manually via POST /library/scan // No automatic sync on startup - user must manually scan their music folder @@ -233,7 +285,7 @@ app.listen(config.port, "0.0.0.0", async () => { // Enrichment worker enabled for OWNED content only // - Background enrichment: Genres, MBIDs, similar artists for owned albums/artists // - On-demand fetching: Artist images, bios 
when browsing (cached in Redis 7 days) - console.log( + logger.debug( "Background enrichment enabled for owned content (genres, MBIDs, etc.)" ); @@ -241,7 +293,7 @@ app.listen(config.port, "0.0.0.0", async () => { // This populates Redis with existing artist images and album covers // so first page loads are instant instead of waiting for cache population dataCacheService.warmupCache().catch((err) => { - console.error("Cache warmup failed:", err); + logger.error("Cache warmup failed:", err); }); // Podcast cache cleanup - runs daily to remove cached episodes older than 30 days @@ -249,17 +301,62 @@ app.listen(config.port, "0.0.0.0", async () => { // Run cleanup on startup (async, don't block) cleanupExpiredCache().catch((err) => { - console.error("Podcast cache cleanup failed:", err); + logger.error("Podcast cache cleanup failed:", err); }); // Schedule daily cleanup (every 24 hours) const TWENTY_FOUR_HOURS = 24 * 60 * 60 * 1000; setInterval(() => { cleanupExpiredCache().catch((err) => { - console.error("Scheduled podcast cache cleanup failed:", err); + logger.error("Scheduled podcast cache cleanup failed:", err); }); }, TWENTY_FOUR_HOURS); - console.log("Podcast cache cleanup scheduled (daily, 30-day expiry)"); + logger.debug("Podcast cache cleanup scheduled (daily, 30-day expiry)"); + + // Auto-sync audiobooks on startup if cache is empty + // This prevents "disappeared" audiobooks after container rebuilds + (async () => { + try { + const { getSystemSettings } = await import("./utils/systemSettings"); + const settings = await getSystemSettings(); + + // Only proceed if Audiobookshelf is configured and enabled + if (settings?.audiobookshelfEnabled && settings?.audiobookshelfUrl) { + // Check if cache is empty + const cachedCount = await prisma.audiobook.count(); + + if (cachedCount === 0) { + logger.debug( + "[STARTUP] Audiobook cache is empty - auto-syncing from Audiobookshelf..." 
+ ); + const { audiobookCacheService } = await import( + "./services/audiobookCache" + ); + const result = await audiobookCacheService.syncAll(); + logger.debug( + `[STARTUP] Audiobook auto-sync complete: ${result.synced} audiobooks cached` + ); + } else { + logger.debug( + `[STARTUP] Audiobook cache has ${cachedCount} entries - skipping auto-sync` + ); + } + } + } catch (err) { + logger.error("[STARTUP] Audiobook auto-sync failed:", err); + // Non-fatal - user can manually sync later + } + })(); + + // Reconcile download queue state with database + const { downloadQueueManager } = await import("./services/downloadQueue"); + try { + const result = await downloadQueueManager.reconcileOnStartup(); + logger.debug(`Download queue reconciled: ${result.loaded} active, ${result.failed} marked failed`); + } catch (err) { + logger.error("Download queue reconciliation failed:", err); + // Non-fatal - queue will start fresh + } }); // Graceful shutdown handling @@ -267,12 +364,12 @@ let isShuttingDown = false; async function gracefulShutdown(signal: string) { if (isShuttingDown) { - console.log("Shutdown already in progress..."); + logger.debug("Shutdown already in progress..."); return; } isShuttingDown = true; - console.log(`\nReceived ${signal}. Starting graceful shutdown...`); + logger.debug(`\nReceived ${signal}. 
Starting graceful shutdown...`); try { // Shutdown workers (intervals, crons, queues) @@ -280,17 +377,17 @@ async function gracefulShutdown(signal: string) { await shutdownWorkers(); // Close Redis connection - console.log("Closing Redis connection..."); + logger.debug("Closing Redis connection..."); await redisClient.quit(); // Close Prisma connection - console.log("Closing database connection..."); + logger.debug("Closing database connection..."); await prisma.$disconnect(); - console.log("Graceful shutdown complete"); + logger.debug("Graceful shutdown complete"); process.exit(0); } catch (error) { - console.error("Error during shutdown:", error); + logger.error("Error during shutdown:", error); process.exit(1); } } diff --git a/backend/src/jobs/queueCleaner.ts b/backend/src/jobs/queueCleaner.ts index 672fcc9..671e1ba 100644 --- a/backend/src/jobs/queueCleaner.ts +++ b/backend/src/jobs/queueCleaner.ts @@ -1,4 +1,5 @@ import { prisma } from "../utils/db"; +import { logger } from "../utils/logger"; import { getSystemSettings } from "../utils/systemSettings"; import { cleanStuckDownloads, @@ -14,19 +15,45 @@ class QueueCleanerService { private maxEmptyChecks = 3; // Stop after 3 consecutive empty checks private timeoutId?: NodeJS.Timeout; + // Cached dynamic imports (lazy-loaded once, reused on subsequent calls) + private discoverWeeklyService: typeof import("../services/discoverWeekly")["discoverWeeklyService"] | null = null; + private matchAlbum: typeof import("../utils/fuzzyMatch")["matchAlbum"] | null = null; + + /** + * Get discoverWeeklyService (lazy-loaded and cached) + */ + private async getDiscoverWeeklyService() { + if (!this.discoverWeeklyService) { + const module = await import("../services/discoverWeekly"); + this.discoverWeeklyService = module.discoverWeeklyService; + } + return this.discoverWeeklyService; + } + + /** + * Get matchAlbum function (lazy-loaded and cached) + */ + private async getMatchAlbum() { + if (!this.matchAlbum) { + const module = 
await import("../utils/fuzzyMatch"); + this.matchAlbum = module.matchAlbum; + } + return this.matchAlbum; + } + /** * Start the polling loop * Safe to call multiple times - won't create duplicate loops */ async start() { if (this.isRunning) { - console.log(" Queue cleaner already running"); + logger.debug(" Queue cleaner already running"); return; } this.isRunning = true; this.emptyQueueChecks = 0; - console.log(" Queue cleaner started (checking every 30s)"); + logger.debug(" Queue cleaner started (checking every 30s)"); await this.runCleanup(); } @@ -40,7 +67,7 @@ class QueueCleanerService { this.timeoutId = undefined; } this.isRunning = false; - console.log(" Queue cleaner stopped (queue empty)"); + logger.debug(" Queue cleaner stopped (queue empty)"); } /** @@ -54,7 +81,7 @@ class QueueCleanerService { const settings = await getSystemSettings(); if (!settings?.lidarrUrl || !settings?.lidarrApiKey) { - console.log(" Lidarr not configured, stopping queue cleaner"); + logger.debug(" Lidarr not configured, stopping queue cleaner"); this.stop(); return; } @@ -63,7 +90,7 @@ class QueueCleanerService { const staleCount = await simpleDownloadManager.markStaleJobsAsFailed(); if (staleCount > 0) { - console.log(`⏰ Cleaned up ${staleCount} stale download(s)`); + logger.debug(`⏰ Cleaned up ${staleCount} stale download(s)`); this.emptyQueueChecks = 0; // Reset counter } @@ -71,20 +98,37 @@ class QueueCleanerService { const reconcileResult = await simpleDownloadManager.reconcileWithLidarr(); if (reconcileResult.reconciled > 0) { - console.log( + logger.debug( `✓ Reconciled ${reconcileResult.reconciled} job(s) with Lidarr` ); this.emptyQueueChecks = 0; // Reset counter } + // PART 0.26: Sync with Lidarr queue (detect cancelled downloads) + const queueSyncResult = await simpleDownloadManager.syncWithLidarrQueue(); + if (queueSyncResult.cancelled > 0) { + logger.debug( + `✓ Synced ${queueSyncResult.cancelled} job(s) with Lidarr queue (cancelled/completed)` + ); + 
this.emptyQueueChecks = 0; // Reset counter + } + + // PART 0.3: Reconcile processing jobs with local library (critical fix for #31) + // Check if albums already exist in Lidify's database even if Lidarr webhooks were missed + const localReconcileResult = await this.reconcileWithLocalLibrary(); + if (localReconcileResult.reconciled > 0) { + logger.debug( + `✓ Reconciled ${localReconcileResult.reconciled} job(s) with local library` + ); + this.emptyQueueChecks = 0; // Reset counter + } + // PART 0.5: Check for stuck discovery batches (batch-level timeout) - const { discoverWeeklyService } = await import( - "../services/discoverWeekly" - ); + const discoverWeeklyService = await this.getDiscoverWeeklyService(); const stuckBatchCount = await discoverWeeklyService.checkStuckBatches(); if (stuckBatchCount > 0) { - console.log( + logger.debug( `⏰ Force-completed ${stuckBatchCount} stuck discovery batch(es)` ); this.emptyQueueChecks = 0; // Reset counter @@ -97,7 +141,7 @@ class QueueCleanerService { ); if (cleanResult.removed > 0) { - console.log( + logger.debug( `[CLEANUP] Removed ${cleanResult.removed} stuck download(s) - searching for alternatives` ); this.emptyQueueChecks = 0; // Reset counter - queue had activity @@ -143,7 +187,7 @@ class QueueCleanerService { }, }); - console.log( + logger.debug( ` Updated job ${job.id}: retry ${ currentRetryCount + 1 }` @@ -187,10 +231,10 @@ class QueueCleanerService { const artistName = download.artist?.name || "Unknown Artist"; const albumTitle = download.album?.title || "Unknown Album"; - console.log( + logger.debug( `Recovered orphaned job: ${artistName} - ${albumTitle}` ); - console.log(` Download ID: ${download.downloadId}`); + logger.debug(` Download ID: ${download.downloadId}`); this.emptyQueueChecks = 0; // Reset counter - found work to do recoveredCount += orphanedJobs.length; @@ -219,11 +263,9 @@ class QueueCleanerService { } if (discoveryBatchIds.size > 0) { - const { discoverWeeklyService } = await import( - 
"../services/discoverWeekly" - ); + const discoverWeeklyService = await this.getDiscoverWeeklyService(); for (const batchId of discoveryBatchIds) { - console.log( + logger.debug( ` Checking Discovery batch completion: ${batchId}` ); await discoverWeeklyService.checkBatchCompletion( @@ -238,7 +280,7 @@ class QueueCleanerService { !j.discoveryBatchId ); if (nonDiscoveryJobs.length > 0) { - console.log( + logger.debug( ` Triggering library scan for recovered job(s)...` ); await scanQueue.add("scan", { @@ -250,12 +292,12 @@ class QueueCleanerService { } if (recoveredCount > 0) { - console.log(`Recovered ${recoveredCount} orphaned job(s)`); + logger.debug(`Recovered ${recoveredCount} orphaned job(s)`); } // Only log skipped count occasionally to reduce noise if (skippedCount > 0 && this.emptyQueueChecks === 0) { - console.log( + logger.debug( ` (Skipped ${skippedCount} incomplete download records)` ); } @@ -272,12 +314,12 @@ class QueueCleanerService { if (!hadActivity) { this.emptyQueueChecks++; - console.log( + logger.debug( ` Queue empty (${this.emptyQueueChecks}/${this.maxEmptyChecks})` ); if (this.emptyQueueChecks >= this.maxEmptyChecks) { - console.log( + logger.debug( ` No activity for ${this.maxEmptyChecks} checks - stopping cleaner` ); this.stop(); @@ -293,7 +335,7 @@ class QueueCleanerService { this.checkInterval ); } catch (error) { - console.error(" Queue cleanup error:", error); + logger.error(" Queue cleanup error:", error); // Still schedule next check even on error this.timeoutId = setTimeout( () => this.runCleanup(), @@ -302,6 +344,171 @@ class QueueCleanerService { } } + /** + * Reconcile processing jobs with local library (Phase 1 & 3 fix for #31) + * Checks if albums already exist in Lidify's database and marks matching jobs as complete + * This handles cases where: + * - Lidarr webhooks were missed + * - MBID mismatches between MusicBrainz and Lidarr + * - Album/artist name differences prevent webhook matching + * + * Phase 3 enhancement: Uses fuzzy 
matching to catch more name variations + * + * PUBLIC: Called by periodic reconciliation in workers/index.ts + */ + async reconcileWithLocalLibrary(): Promise<{ reconciled: number }> { + const processingJobs = await prisma.downloadJob.findMany({ + where: { status: { in: ["pending", "processing"] } }, + }); + + if (processingJobs.length === 0) { + return { reconciled: 0 }; + } + + logger.debug( + `[LOCAL-RECONCILE] Checking ${processingJobs.length} job(s) against local library...` + ); + + let reconciled = 0; + + for (const job of processingJobs) { + const metadata = (job.metadata as any) || {}; + const artistName = metadata?.artistName; + const albumTitle = metadata?.albumTitle; + + if (!artistName || !albumTitle) { + continue; + } + + try { + // First try: Exact/contains match (fast) + let localAlbum = await prisma.album.findFirst({ + where: { + AND: [ + { + artist: { + name: { + contains: artistName, + mode: "insensitive", + }, + }, + }, + { + title: { + contains: albumTitle, + mode: "insensitive", + }, + }, + ], + }, + include: { + tracks: { + select: { id: true }, + take: 1, + }, + artist: { + select: { name: true }, + }, + }, + }); + + // Second try: Fuzzy match if exact match failed (slower but more thorough) + if (!localAlbum || localAlbum.tracks.length === 0) { + const matchAlbum = await this.getMatchAlbum(); + + // Get all albums from artists with similar names + const candidateAlbums = await prisma.album.findMany({ + where: { + artist: { + name: { + contains: artistName.substring(0, 5), + mode: "insensitive", + }, + }, + }, + include: { + tracks: { + select: { id: true }, + take: 1, + }, + artist: { + select: { name: true }, + }, + }, + take: 50, // Limit to prevent performance issues + }); + + // Find best fuzzy match + const fuzzyMatch = candidateAlbums.find( + (album) => + album.tracks.length > 0 && + matchAlbum( + artistName, + albumTitle, + album.artist.name, + album.title, + 0.75 + ) + ); + + if (fuzzyMatch) { + localAlbum = fuzzyMatch; + } + + if 
(localAlbum) { + logger.debug( + `[LOCAL-RECONCILE] Fuzzy matched "${artistName} - ${albumTitle}" to "${localAlbum.artist.name} - ${localAlbum.title}"` + ); + } + } + + if (localAlbum && localAlbum.tracks.length > 0) { + logger.debug( + `[LOCAL-RECONCILE] ✓ Found "${localAlbum.artist.name} - ${localAlbum.title}" in library for job ${job.id}` + ); + + // Album exists with tracks - mark job complete + await prisma.downloadJob.update({ + where: { id: job.id }, + data: { + status: "completed", + completedAt: new Date(), + error: null, + metadata: { + ...metadata, + completedAt: new Date().toISOString(), + reconciledFromLocalLibrary: true, + }, + }, + }); + + reconciled++; + + // Check batch completion for discovery jobs + if (job.discoveryBatchId) { + const discoverWeeklyService = await this.getDiscoverWeeklyService(); + await discoverWeeklyService.checkBatchCompletion( + job.discoveryBatchId + ); + } + } + } catch (error: any) { + logger.error( + `[LOCAL-RECONCILE] Error checking job ${job.id}:`, + error.message + ); + } + } + + if (reconciled > 0) { + logger.debug( + `[LOCAL-RECONCILE] Marked ${reconciled} job(s) complete from local library` + ); + } + + return { reconciled }; + } + /** * Get current status (for debugging/monitoring) */ diff --git a/backend/src/middleware/auth.ts b/backend/src/middleware/auth.ts index 44dd4f1..bb4ff84 100644 --- a/backend/src/middleware/auth.ts +++ b/backend/src/middleware/auth.ts @@ -1,4 +1,5 @@ import { Request, Response, NextFunction } from "express"; +import { logger } from "../utils/logger"; import { prisma } from "../utils/db"; import jwt from "jsonwebtoken"; @@ -11,6 +12,9 @@ if (!JWT_SECRET) { ); } +// Type assertion after validation - JWT_SECRET is guaranteed to be a string +const JWT_SECRET_VALIDATED: string = JWT_SECRET; + declare global { namespace Express { interface Request { @@ -23,91 +27,177 @@ declare global { } } +export interface AuthenticatedRequest extends Request { + user: { + id: string; + username: string; + 
role: string; + }; +} + export interface JWTPayload { userId: string; username: string; role: string; + tokenVersion?: number; + type?: string; } -export function generateToken(user: { id: string; username: string; role: string }): string { +export function generateToken(user: { + id: string; + username: string; + role: string; + tokenVersion: number; +}): string { return jwt.sign( - { userId: user.id, username: user.username, role: user.role }, - JWT_SECRET, + { + userId: user.id, + username: user.username, + role: user.role, + tokenVersion: user.tokenVersion + }, + JWT_SECRET_VALIDATED, + { expiresIn: "24h" } + ); +} + +export function generateRefreshToken(user: { + id: string; + tokenVersion: number; +}): string { + return jwt.sign( + { + userId: user.id, + tokenVersion: user.tokenVersion, + type: "refresh" + }, + JWT_SECRET_VALIDATED, { expiresIn: "30d" } ); } +/** + * Helper function to authenticate a request using session, API key, or JWT + * @param req Express request object + * @param checkQueryToken Whether to check for token in query params (for streaming) + * @returns User object if authenticated, null otherwise + */ +async function authenticateRequest( + req: Request, + checkQueryToken: boolean = false +): Promise<{ id: string; username: string; role: string } | null> { + // Check session-based auth + if (req.session?.userId) { + try { + const user = await prisma.user.findUnique({ + where: { id: req.session.userId }, + select: { id: true, username: true, role: true }, + }); + if (user) return user; + } catch (error) { + logger.error("Session auth error:", error); + } + } + + // Check for API key in X-API-Key header + const apiKey = req.headers["x-api-key"] as string; + if (apiKey) { + try { + const apiKeyRecord = await prisma.apiKey.findUnique({ + where: { key: apiKey }, + include: { + user: { select: { id: true, username: true, role: true } }, + }, + }); + + if (apiKeyRecord && apiKeyRecord.user) { + // Update last used timestamp (async, don't block) + 
prisma.apiKey + .update({ + where: { id: apiKeyRecord.id }, + data: { lastUsed: new Date() }, + }) + .catch(() => {}); + + return apiKeyRecord.user; + } + } catch (error) { + logger.error("API key auth error:", error); + } + } + + // Check for token in query param (for streaming URLs) + if (checkQueryToken) { + const tokenParam = req.query.token as string; + if (tokenParam) { + try { + const decoded = jwt.verify( + tokenParam, + JWT_SECRET_VALIDATED + ) as unknown as JWTPayload; + const user = await prisma.user.findUnique({ + where: { id: decoded.userId }, + select: { id: true, username: true, role: true, tokenVersion: true }, + }); + if (user) { + // Validate tokenVersion - reject if password was changed + if (decoded.tokenVersion === undefined || decoded.tokenVersion !== user.tokenVersion) { + return null; + } + return { id: user.id, username: user.username, role: user.role }; + } + } catch (error) { + // Token invalid, try other methods + } + } + } + + // Check JWT token in Authorization header + const authHeader = req.headers.authorization; + const token = authHeader?.startsWith("Bearer ") + ? 
authHeader.substring(7) + : null; + + if (token) { + try { + const decoded = jwt.verify(token, JWT_SECRET_VALIDATED) as unknown as JWTPayload; + const user = await prisma.user.findUnique({ + where: { id: decoded.userId }, + select: { id: true, username: true, role: true, tokenVersion: true }, + }); + if (user) { + // Validate tokenVersion - reject if password was changed + if (decoded.tokenVersion === undefined || decoded.tokenVersion !== user.tokenVersion) { + return null; + } + return { id: user.id, username: user.username, role: user.role }; + } + } catch (error) { + // Token invalid + } + } + + return null; +} + export async function requireAuth( req: Request, res: Response, next: NextFunction ) { - // First, check session-based auth (primary method) - if (req.session?.userId) { - try { - const user = await prisma.user.findUnique({ - where: { id: req.session.userId }, - select: { id: true, username: true, role: true }, - }); - - if (user) { - req.user = user; - return next(); - } - } catch (error) { - console.error("Session auth error:", error); - } + const user = await authenticateRequest(req, false); + if (user) { + req.user = user; + return next(); } - - // Check for API key in X-API-Key header (for mobile/external apps) - const apiKey = req.headers["x-api-key"] as string; - if (apiKey) { - try { - const apiKeyRecord = await prisma.apiKey.findUnique({ - where: { key: apiKey }, - include: { user: { select: { id: true, username: true, role: true } } }, - }); - - if (apiKeyRecord && apiKeyRecord.user) { - // Update last used timestamp (async, don't block) - prisma.apiKey.update({ - where: { id: apiKeyRecord.id }, - data: { lastUsed: new Date() }, - }).catch(() => {}); // Ignore errors on lastUsed update - - req.user = apiKeyRecord.user; - return next(); - } - } catch (error) { - console.error("API key auth error:", error); - } - } - - // Fallback: check JWT token in Authorization header - const authHeader = req.headers.authorization; - const token = 
authHeader?.startsWith("Bearer ") ? authHeader.substring(7) : null; - - if (token) { - try { - const decoded = jwt.verify(token, JWT_SECRET) as JWTPayload; - const user = await prisma.user.findUnique({ - where: { id: decoded.userId }, - select: { id: true, username: true, role: true }, - }); - - if (user) { - req.user = user; - return next(); - } - } catch (error) { - // Token invalid, continue to error - } - } - return res.status(401).json({ error: "Not authenticated" }); } -export async function requireAdmin(req: Request, res: Response, next: NextFunction) { +export async function requireAdmin( + req: Request, + res: Response, + next: NextFunction +) { if (!req.user || req.user.role !== "admin") { return res.status(403).json({ error: "Admin access required" }); } @@ -133,7 +223,7 @@ export async function requireAuthOrToken( return next(); } } catch (error) { - console.error("Session auth error:", error); + logger.error("Session auth error:", error); } } @@ -143,21 +233,25 @@ export async function requireAuthOrToken( try { const apiKeyRecord = await prisma.apiKey.findUnique({ where: { key: apiKey }, - include: { user: { select: { id: true, username: true, role: true } } }, + include: { + user: { select: { id: true, username: true, role: true } }, + }, }); if (apiKeyRecord && apiKeyRecord.user) { // Update last used timestamp (async, don't block) - prisma.apiKey.update({ - where: { id: apiKeyRecord.id }, - data: { lastUsed: new Date() }, - }).catch(() => {}); // Ignore errors on lastUsed update + prisma.apiKey + .update({ + where: { id: apiKeyRecord.id }, + data: { lastUsed: new Date() }, + }) + .catch(() => {}); // Ignore errors on lastUsed update req.user = apiKeyRecord.user; return next(); } } catch (error) { - console.error("API key auth error:", error); + logger.error("API key auth error:", error); } } @@ -165,15 +259,20 @@ export async function requireAuthOrToken( const tokenParam = req.query.token as string; if (tokenParam) { try { - const decoded = 
jwt.verify(tokenParam, JWT_SECRET) as JWTPayload; + const decoded = jwt.verify(tokenParam, JWT_SECRET_VALIDATED) as unknown as JWTPayload; const user = await prisma.user.findUnique({ where: { id: decoded.userId }, - select: { id: true, username: true, role: true }, + select: { id: true, username: true, role: true, tokenVersion: true }, }); if (user) { - req.user = user; - return next(); + // Validate tokenVersion - reject if password was changed + if (decoded.tokenVersion === undefined || decoded.tokenVersion !== user.tokenVersion) { + // Token was issued before password change, reject + } else { + req.user = { id: user.id, username: user.username, role: user.role }; + return next(); + } } } catch (error) { // Token invalid, try other methods @@ -182,19 +281,26 @@ export async function requireAuthOrToken( // Fallback: check JWT token in Authorization header const authHeader = req.headers.authorization; - const token = authHeader?.startsWith("Bearer ") ? authHeader.substring(7) : null; + const token = authHeader?.startsWith("Bearer ") + ? 
authHeader.substring(7) + : null; if (token) { try { - const decoded = jwt.verify(token, JWT_SECRET) as JWTPayload; + const decoded = jwt.verify(token, JWT_SECRET_VALIDATED) as unknown as JWTPayload; const user = await prisma.user.findUnique({ where: { id: decoded.userId }, - select: { id: true, username: true, role: true }, + select: { id: true, username: true, role: true, tokenVersion: true }, }); if (user) { - req.user = user; - return next(); + // Validate tokenVersion - reject if password was changed + if (decoded.tokenVersion === undefined || decoded.tokenVersion !== user.tokenVersion) { + // Token was issued before password change, reject + } else { + req.user = { id: user.id, username: user.username, role: user.role }; + return next(); + } } } catch (error) { // Token invalid, continue to error diff --git a/backend/src/middleware/errorHandler.ts b/backend/src/middleware/errorHandler.ts index 3dde8b1..0603ba1 100644 --- a/backend/src/middleware/errorHandler.ts +++ b/backend/src/middleware/errorHandler.ts @@ -1,4 +1,7 @@ import { Request, Response, NextFunction } from "express"; +import { logger } from "../utils/logger"; +import { AppError, ErrorCategory } from "../utils/errors"; +import { config } from "../config"; export function errorHandler( err: Error, @@ -6,6 +9,43 @@ export function errorHandler( res: Response, next: NextFunction ) { - console.error(err.stack); - res.status(500).json({ error: "Internal server error" }); + // Handle AppError with proper categorization + if (err instanceof AppError) { + // Map error category to HTTP status code + let statusCode = 500; + switch (err.category) { + case ErrorCategory.RECOVERABLE: + statusCode = 400; // Bad Request - client can retry with changes + break; + case ErrorCategory.TRANSIENT: + statusCode = 503; // Service Unavailable - client can retry later + break; + case ErrorCategory.FATAL: + statusCode = 500; // Internal Server Error - cannot recover + break; + } + + logger.error(`[AppError] ${err.code}: 
${err.message}`, err.details); + + return res.status(statusCode).json({ + error: err.message, + code: err.code, + category: err.category, + ...(config.nodeEnv === "development" && { details: err.details }), + }); + } + + // Log stack trace for unhandled errors + logger.error("Unhandled error:", err.stack); + + // In production, hide stack traces and internal details + if (config.nodeEnv === "production") { + return res.status(500).json({ error: "Internal server error" }); + } + + // In development, provide more details + res.status(500).json({ + error: err.message || "Internal server error", + stack: err.stack, + }); } diff --git a/backend/src/routes/analysis.ts b/backend/src/routes/analysis.ts index 0cb9d89..91612b0 100644 --- a/backend/src/routes/analysis.ts +++ b/backend/src/routes/analysis.ts @@ -1,7 +1,10 @@ import { Router } from "express"; +import { logger } from "../utils/logger"; import { prisma } from "../utils/db"; import { redisClient } from "../utils/redis"; import { requireAuth, requireAdmin } from "../middleware/auth"; +import { getSystemSettings } from "../utils/systemSettings"; +import os from "os"; const router = Router(); @@ -42,7 +45,7 @@ router.get("/status", requireAuth, async (req, res) => { isComplete: pending === 0 && processing === 0 && queueLength === 0, }); } catch (error: any) { - console.error("Analysis status error:", error); + logger.error("Analysis status error:", error); res.status(500).json({ error: "Failed to get analysis status" }); } }); @@ -87,14 +90,14 @@ router.post("/start", requireAuth, requireAdmin, async (req, res) => { } await pipeline.exec(); - console.log(`Queued ${tracks.length} tracks for audio analysis`); + logger.debug(`Queued ${tracks.length} tracks for audio analysis`); res.json({ message: `Queued ${tracks.length} tracks for analysis`, queued: tracks.length, }); } catch (error: any) { - console.error("Analysis start error:", error); + logger.error("Analysis start error:", error); res.status(500).json({ error: 
"Failed to start analysis" }); } }); @@ -121,7 +124,7 @@ router.post("/retry-failed", requireAuth, requireAdmin, async (req, res) => { reset: result.count, }); } catch (error: any) { - console.error("Retry failed error:", error); + logger.error("Retry failed error:", error); res.status(500).json({ error: "Failed to retry analysis" }); } }); @@ -166,7 +169,7 @@ router.post("/analyze/:trackId", requireAuth, async (req, res) => { trackId, }); } catch (error: any) { - console.error("Analyze track error:", error); + logger.error("Analyze track error:", error); res.status(500).json({ error: "Failed to queue track for analysis" }); } }); @@ -214,7 +217,7 @@ router.get("/track/:trackId", requireAuth, async (req, res) => { res.json(track); } catch (error: any) { - console.error("Get track analysis error:", error); + logger.error("Get track analysis error:", error); res.status(500).json({ error: "Failed to get track analysis" }); } }); @@ -280,14 +283,77 @@ router.get("/features", requireAuth, async (req, res) => { }, }); } catch (error: any) { - console.error("Get features error:", error); + logger.error("Get features error:", error); res.status(500).json({ error: "Failed to get feature statistics" }); } }); +/** + * GET /api/analysis/workers + * Get current audio analyzer worker configuration + */ +router.get("/workers", requireAuth, requireAdmin, async (req, res) => { + try { + const settings = await getSystemSettings(); + const cpuCores = os.cpus().length; + const currentWorkers = settings?.audioAnalyzerWorkers || 2; + + // Recommended: 50% of CPU cores, min 2, max 8 + const recommended = Math.max(2, Math.min(8, Math.floor(cpuCores / 2))); + + res.json({ + workers: currentWorkers, + cpuCores, + recommended, + description: `Using ${currentWorkers} of ${cpuCores} available CPU cores`, + }); + } catch (error: any) { + logger.error("Get workers config error:", error); + res.status(500).json({ error: "Failed to get worker configuration" }); + } +}); + +/** + * PUT 
/api/analysis/workers + * Update audio analyzer worker count + */ +router.put("/workers", requireAuth, requireAdmin, async (req, res) => { + try { + const { workers } = req.body; + + if (typeof workers !== 'number' || workers < 1 || workers > 8) { + return res.status(400).json({ + error: "Workers must be a number between 1 and 8" + }); + } + + // Update SystemSettings + await prisma.systemSettings.update({ + where: { id: "default" }, + data: { audioAnalyzerWorkers: workers }, + }); + + // Publish control signal to Redis for Python worker to pick up + await redisClient.publish( + "audio:analysis:control", + JSON.stringify({ command: "set_workers", count: workers }) + ); + + const cpuCores = os.cpus().length; + const recommended = Math.max(2, Math.min(8, Math.floor(cpuCores / 2))); + + logger.info(`Audio analyzer workers updated to ${workers}`); + + res.json({ + workers, + cpuCores, + recommended, + description: `Using ${workers} of ${cpuCores} available CPU cores`, + }); + } catch (error: any) { + logger.error("Update workers config error:", error); + res.status(500).json({ error: "Failed to update worker configuration" }); + } +}); + export default router; - - - - - diff --git a/backend/src/routes/apiKeys.ts b/backend/src/routes/apiKeys.ts index 52fdf34..7d8374b 100644 --- a/backend/src/routes/apiKeys.ts +++ b/backend/src/routes/apiKeys.ts @@ -1,4 +1,5 @@ import { Router } from "express"; +import { logger } from "../utils/logger"; import { requireAuth } from "../middleware/auth"; import { prisma } from "../utils/db"; import crypto from "crypto"; @@ -88,7 +89,7 @@ router.post("/", async (req, res) => { }, }); - console.log(`API key created for user ${userId}: ${deviceName}`); + logger.debug(`API key created for user ${userId}: ${deviceName}`); res.status(201).json({ apiKey: apiKey.key, @@ -98,7 +99,7 @@ router.post("/", async (req, res) => { "API key created successfully. 
Save this key - you won't see it again!", }); } catch (error) { - console.error("Create API key error:", error); + logger.error("Create API key error:", error); res.status(500).json({ error: "Failed to create API key" }); } }); @@ -152,7 +153,7 @@ router.get("/", async (req, res) => { res.json({ apiKeys: keys }); } catch (error) { - console.error("List API keys error:", error); + logger.error("List API keys error:", error); res.status(500).json({ error: "Failed to list API keys" }); } }); @@ -219,11 +220,11 @@ router.delete("/:id", async (req, res) => { .json({ error: "API key not found or already deleted" }); } - console.log(`API key ${keyId} revoked by user ${userId}`); + logger.debug(`API key ${keyId} revoked by user ${userId}`); res.json({ message: "API key revoked successfully" }); } catch (error) { - console.error("Delete API key error:", error); + logger.error("Delete API key error:", error); res.status(500).json({ error: "Failed to revoke API key" }); } }); diff --git a/backend/src/routes/artists.ts b/backend/src/routes/artists.ts index 651da6a..b060f34 100644 --- a/backend/src/routes/artists.ts +++ b/backend/src/routes/artists.ts @@ -1,4 +1,5 @@ import { Router } from "express"; +import { logger } from "../utils/logger"; import { lastFmService } from "../services/lastfm"; import { musicBrainzService } from "../services/musicbrainz"; import { fanartService } from "../services/fanart"; @@ -17,7 +18,7 @@ router.get("/preview/:artistName/:trackTitle", async (req, res) => { const decodedArtist = decodeURIComponent(artistName); const decodedTrack = decodeURIComponent(trackTitle); - console.log( + logger.debug( `Getting preview for "${decodedTrack}" by ${decodedArtist}` ); @@ -32,7 +33,7 @@ router.get("/preview/:artistName/:trackTitle", async (req, res) => { res.status(404).json({ error: "Preview not found" }); } } catch (error: any) { - console.error("Preview fetch error:", error); + logger.error("Preview fetch error:", error); res.status(500).json({ error: 
"Failed to fetch preview", message: error.message, @@ -50,7 +51,7 @@ router.get("/discover/:nameOrMbid", async (req, res) => { try { const cached = await redisClient.get(cacheKey); if (cached) { - console.log(`[Discovery] Cache hit for artist: ${nameOrMbid}`); + logger.debug(`[Discovery] Cache hit for artist: ${nameOrMbid}`); return res.json(JSON.parse(cached)); } } catch (err) { @@ -108,7 +109,7 @@ router.get("/discover/:nameOrMbid", async (req, res) => { lowerBio.includes("multiple artists") ) { // This is a disambiguation page - don't show it - console.log( + logger.debug( ` Filtered out disambiguation biography for ${artistName}` ); bio = null; @@ -125,7 +126,7 @@ router.get("/discover/:nameOrMbid", async (req, res) => { 10 ); } catch (error) { - console.log(`Failed to get top tracks for ${artistName}`); + logger.debug(`Failed to get top tracks for ${artistName}`); } } @@ -136,9 +137,9 @@ router.get("/discover/:nameOrMbid", async (req, res) => { if (mbid) { try { image = await fanartService.getArtistImage(mbid); - console.log(`Fanart.tv image for ${artistName}`); + logger.debug(`Fanart.tv image for ${artistName}`); } catch (error) { - console.log( + logger.debug( `✗ Failed to get Fanart.tv image for ${artistName}` ); } @@ -149,10 +150,10 @@ router.get("/discover/:nameOrMbid", async (req, res) => { try { image = await deezerService.getArtistImage(artistName); if (image) { - console.log(`Deezer image for ${artistName}`); + logger.debug(`Deezer image for ${artistName}`); } } catch (error) { - console.log(`✗ Failed to get Deezer image for ${artistName}`); + logger.debug(` Failed to get Deezer image for ${artistName}`); } } @@ -165,9 +166,9 @@ router.get("/discover/:nameOrMbid", async (req, res) => { !lastFmImage.includes("2a96cbd8b46e442fc41c2b86b821562f") ) { image = lastFmImage; - console.log(`Last.fm image for ${artistName}`); + logger.debug(`Last.fm image for ${artistName}`); } else { - console.log(`✗ Last.fm returned placeholder for ${artistName}`); + 
logger.debug(` Last.fm returned placeholder for ${artistName}`); } } @@ -265,7 +266,7 @@ router.get("/discover/:nameOrMbid", async (req, res) => { return 0; }); } catch (error) { - console.error( + logger.error( `Failed to get discography for ${artistName}:`, error ); @@ -355,14 +356,14 @@ router.get("/discover/:nameOrMbid", async (req, res) => { DISCOVERY_CACHE_TTL, JSON.stringify(response) ); - console.log(`[Discovery] Cached artist: ${artistName}`); + logger.debug(`[Discovery] Cached artist: ${artistName}`); } catch (err) { // Redis errors are non-critical } res.json(response); } catch (error: any) { - console.error("Artist discovery error:", error); + logger.error("Artist discovery error:", error); res.status(500).json({ error: "Failed to fetch artist details", message: error.message, @@ -380,7 +381,7 @@ router.get("/album/:mbid", async (req, res) => { try { const cached = await redisClient.get(cacheKey); if (cached) { - console.log(`[Discovery] Cache hit for album: ${mbid}`); + logger.debug(`[Discovery] Cache hit for album: ${mbid}`); return res.json(JSON.parse(cached)); } } catch (err) { @@ -397,7 +398,7 @@ router.get("/album/:mbid", async (req, res) => { } catch (error: any) { // If 404, try as a release instead if (error.response?.status === 404) { - console.log( + logger.debug( `${mbid} is not a release-group, trying as release...` ); release = await musicBrainzService.getRelease(mbid); @@ -410,7 +411,7 @@ router.get("/album/:mbid", async (req, res) => { releaseGroupId ); } catch (err) { - console.error( + logger.error( `Failed to get release-group ${releaseGroupId}` ); } @@ -439,7 +440,7 @@ router.get("/album/:mbid", async (req, res) => { albumTitle ); } catch (error) { - console.log(`Failed to get Last.fm info for ${albumTitle}`); + logger.debug(`Failed to get Last.fm info for ${albumTitle}`); } // Get tracks - if we have release, use it directly; otherwise get first release from group @@ -454,7 +455,7 @@ router.get("/album/:mbid", async (req, res) => { 
); tracks = releaseDetails.media?.[0]?.tracks || []; } catch (error) { - console.error( + logger.error( `Failed to get tracks for release ${firstRelease.id}` ); } @@ -472,14 +473,14 @@ router.get("/album/:mbid", async (req, res) => { const response = await fetch(coverArtUrl, { method: "HEAD" }); if (response.ok) { coverUrl = coverArtUrl; - console.log(`Cover Art Archive has cover for ${albumTitle}`); + logger.debug(`Cover Art Archive has cover for ${albumTitle}`); } else { - console.log( + logger.debug( `✗ Cover Art Archive 404 for ${albumTitle}, trying Deezer...` ); } } catch (error) { - console.log( + logger.debug( `✗ Cover Art Archive check failed for ${albumTitle}, trying Deezer...` ); } @@ -493,13 +494,13 @@ router.get("/album/:mbid", async (req, res) => { ); if (deezerCover) { coverUrl = deezerCover; - console.log(`Deezer has cover for ${albumTitle}`); + logger.debug(`Deezer has cover for ${albumTitle}`); } else { // Final fallback to Cover Art Archive URL (might 404, but better than nothing) coverUrl = coverArtUrl; } } catch (error) { - console.log(`✗ Deezer lookup failed for ${albumTitle}`); + logger.debug(` Deezer lookup failed for ${albumTitle}`); // Final fallback to Cover Art Archive URL coverUrl = coverArtUrl; } @@ -548,14 +549,14 @@ router.get("/album/:mbid", async (req, res) => { DISCOVERY_CACHE_TTL, JSON.stringify(response) ); - console.log(`[Discovery] Cached album: ${albumTitle}`); + logger.debug(`[Discovery] Cached album: ${albumTitle}`); } catch (err) { // Redis errors are non-critical } res.json(response); } catch (error: any) { - console.error("Album discovery error:", error); + logger.error("Album discovery error:", error); res.status(500).json({ error: "Failed to fetch album details", message: error.message, diff --git a/backend/src/routes/audiobooks.ts b/backend/src/routes/audiobooks.ts index b44f1c9..89d1af3 100644 --- a/backend/src/routes/audiobooks.ts +++ b/backend/src/routes/audiobooks.ts @@ -1,4 +1,5 @@ import { Router } from 
"express"; +import { logger } from "../utils/logger"; import { audiobookshelfService } from "../services/audiobookshelf"; import { audiobookCacheService } from "../services/audiobookCache"; import { prisma } from "../utils/db"; @@ -57,7 +58,7 @@ router.get( res.json(transformed); } catch (error: any) { - console.error("Error fetching continue listening:", error); + logger.error("Error fetching continue listening:", error); res.status(500).json({ error: "Failed to fetch continue listening", message: error.message, @@ -83,14 +84,14 @@ router.post("/sync", requireAuthOrToken, apiLimiter, async (req, res) => { .json({ error: "Audiobookshelf not enabled" }); } - console.log("[Audiobooks] Starting manual audiobook sync..."); + logger.debug("[Audiobooks] Starting manual audiobook sync..."); const result = await audiobookCacheService.syncAll(); // Check how many have series after sync const seriesCount = await prisma.audiobook.count({ where: { series: { not: null } }, }); - console.log( + logger.debug( `[Audiobooks] Sync complete. 
Books with series: ${seriesCount}` ); @@ -108,7 +109,7 @@ router.post("/sync", requireAuthOrToken, apiLimiter, async (req, res) => { result, }); } catch (error: any) { - console.error("Audiobook sync failed:", error); + logger.error("Audiobook sync failed:", error); res.status(500).json({ error: "Sync failed", message: error.message, @@ -122,7 +123,7 @@ router.post("/sync", requireAuthOrToken, apiLimiter, async (req, res) => { */ // Debug endpoint for series data router.get("/debug-series", requireAuthOrToken, async (req, res) => { - console.log("[Audiobooks] Debug series endpoint called"); + logger.debug("[Audiobooks] Debug series endpoint called"); try { const { getSystemSettings } = await import("../utils/systemSettings"); const settings = await getSystemSettings(); @@ -135,7 +136,7 @@ router.get("/debug-series", requireAuthOrToken, async (req, res) => { // Get raw data from Audiobookshelf const rawBooks = await audiobookshelfService.getAllAudiobooks(); - console.log( + logger.debug( `[Audiobooks] Got ${rawBooks.length} books from Audiobookshelf` ); @@ -145,7 +146,7 @@ router.get("/debug-series", requireAuthOrToken, async (req, res) => { return metadata.series || metadata.seriesName; }); - console.log( + logger.debug( `[Audiobooks] Books with series data: ${booksWithSeries.length}` ); @@ -179,7 +180,7 @@ router.get("/debug-series", requireAuthOrToken, async (req, res) => { fullSampleWithSeries: fullSample, }); } catch (error: any) { - console.error("[Audiobooks] Debug series error:", error); + logger.error("[Audiobooks] Debug series error:", error); res.status(500).json({ error: error.message }); } }); @@ -207,7 +208,7 @@ router.get("/search", requireAuthOrToken, apiLimiter, async (req, res) => { const results = await audiobookshelfService.searchAudiobooks(q); res.json(results); } catch (error: any) { - console.error("Error searching audiobooks:", error); + logger.error("Error searching audiobooks:", error); res.status(500).json({ error: "Failed to search 
audiobooks", message: error.message, @@ -220,7 +221,7 @@ router.get("/search", requireAuthOrToken, apiLimiter, async (req, res) => { * Get all audiobooks from cached database (instant, no API calls) */ router.get("/", requireAuthOrToken, apiLimiter, async (req, res) => { - console.log("[Audiobooks] GET / - fetching audiobooks list"); + logger.debug("[Audiobooks] GET / - fetching audiobooks list"); try { // Check if Audiobookshelf is enabled first const { getSystemSettings } = await import("../utils/systemSettings"); @@ -296,7 +297,7 @@ router.get("/", requireAuthOrToken, apiLimiter, async (req, res) => { res.json(audiobooksWithProgress); } catch (error: any) { - console.error("Error fetching audiobooks:", error); + logger.error("Error fetching audiobooks:", error); res.status(500).json({ error: "Failed to fetch audiobooks", message: error.message, @@ -394,7 +395,7 @@ router.get( res.json(seriesBooks); } catch (error: any) { - console.error("Error fetching series:", error); + logger.error("Error fetching series:", error); res.status(500).json({ error: "Failed to fetch series", message: error.message, @@ -419,7 +420,7 @@ router.options("/:id/cover", (req, res) => { /** * GET /audiobooks/:id/cover - * Serve cached cover image from local disk (instant, no proxying) + * Serve cached cover image from local disk, or proxy from Audiobookshelf if not cached * NO RATE LIMITING - These are static files served from disk with aggressive caching */ router.get("/:id/cover", async (req, res) => { @@ -431,7 +432,7 @@ router.get("/:id/cover", async (req, res) => { const audiobook = await prisma.audiobook.findUnique({ where: { id }, - select: { localCoverPath: true }, + select: { localCoverPath: true, coverUrl: true }, }); let coverPath = audiobook?.localCoverPath; @@ -456,25 +457,54 @@ router.get("/:id/cover", async (req, res) => { } } - if (!coverPath) { - return res.status(404).json({ error: "Cover not found" }); + // If local cover exists, serve it + if (coverPath && 
fs.existsSync(coverPath)) { + const origin = req.headers.origin || "http://localhost:3030"; + res.setHeader("Cache-Control", "public, max-age=31536000, immutable"); + res.setHeader("Access-Control-Allow-Origin", origin); + res.setHeader("Access-Control-Allow-Credentials", "true"); + res.setHeader("Cross-Origin-Resource-Policy", "cross-origin"); + return res.sendFile(coverPath); } - // Verify file exists before sending - if (!fs.existsSync(coverPath)) { - return res.status(404).json({ error: "Cover file missing" }); + // Fallback: proxy from Audiobookshelf if coverUrl is available + if (audiobook?.coverUrl) { + const { getSystemSettings } = await import("../utils/systemSettings"); + const settings = await getSystemSettings(); + + if (settings?.audiobookshelfUrl && settings?.audiobookshelfApiKey) { + const baseUrl = settings.audiobookshelfUrl.replace(/\/$/, ""); + const coverApiUrl = `${baseUrl}/api/${audiobook.coverUrl}`; + + try { + const response = await fetch(coverApiUrl, { + headers: { + Authorization: `Bearer ${settings.audiobookshelfApiKey}`, + }, + }); + + if (response.ok) { + const origin = req.headers.origin || "http://localhost:3030"; + res.setHeader("Content-Type", response.headers.get("content-type") || "image/jpeg"); + res.setHeader("Cache-Control", "public, max-age=86400"); // 24 hours for proxied + res.setHeader("Access-Control-Allow-Origin", origin); + res.setHeader("Access-Control-Allow-Credentials", "true"); + res.setHeader("Cross-Origin-Resource-Policy", "cross-origin"); + + // Stream the response body to client + const buffer = await response.arrayBuffer(); + return res.send(Buffer.from(buffer)); + } + } catch (proxyError: any) { + logger.error(`[Audiobook Cover] Proxy error for ${id}:`, proxyError.message); + } + } } - // Serve image from local disk with aggressive caching and CORS headers - // Use specific origin instead of * to support credentials mode - const origin = req.headers.origin || "http://localhost:3030"; - 
res.setHeader("Cache-Control", "public, max-age=31536000, immutable"); - res.setHeader("Access-Control-Allow-Origin", origin); - res.setHeader("Access-Control-Allow-Credentials", "true"); - res.setHeader("Cross-Origin-Resource-Policy", "cross-origin"); - res.sendFile(coverPath); + // No cover available + return res.status(404).json({ error: "Cover not found" }); } catch (error: any) { - console.error("Error serving cover:", error); + logger.error("Error serving cover:", error); res.status(500).json({ error: "Failed to serve cover", message: error.message, @@ -509,18 +539,22 @@ router.get("/:id", requireAuthOrToken, apiLimiter, async (req, res) => { audiobook.lastSyncedAt < new Date(Date.now() - 7 * 24 * 60 * 60 * 1000) ) { - console.log( + logger.debug( `[AUDIOBOOK] Audiobook ${id} not cached or stale, fetching...` ); audiobook = await audiobookCacheService.getAudiobook(id); } + if (!audiobook) { + return res.status(404).json({ error: "Audiobook not found" }); + } + // Get chapters and audio files from API (these change less frequently) let absBook; try { absBook = await audiobookshelfService.getAudiobook(id); } catch (apiError: any) { - console.warn( + logger.warn( ` Failed to fetch live data from Audiobookshelf for ${id}, using cached data only:`, apiError.message ); @@ -567,7 +601,7 @@ router.get("/:id", requireAuthOrToken, apiLimiter, async (req, res) => { res.json(response); } catch (error: any) { - console.error("Error fetching audiobook__", error); + logger.error("Error fetching audiobook__", error); res.status(500).json({ error: "Failed to fetch audiobook", message: error.message, @@ -581,17 +615,17 @@ router.get("/:id", requireAuthOrToken, apiLimiter, async (req, res) => { */ router.get("/:id/stream", requireAuthOrToken, async (req, res) => { try { - console.log( + logger.debug( `[Audiobook Stream] Request for audiobook: ${req.params.id}` ); - console.log(`[Audiobook Stream] User: ${req.user?.id || "unknown"}`); + logger.debug(`[Audiobook Stream] User: 
${req.user?.id || "unknown"}`); // Check if Audiobookshelf is enabled const { getSystemSettings } = await import("../utils/systemSettings"); const settings = await getSystemSettings(); if (!settings?.audiobookshelfEnabled) { - console.log("[Audiobook Stream] Audiobookshelf not enabled"); + logger.debug("[Audiobook Stream] Audiobookshelf not enabled"); return res .status(503) .json({ error: "Audiobookshelf is not configured" }); @@ -600,7 +634,7 @@ router.get("/:id/stream", requireAuthOrToken, async (req, res) => { const { id } = req.params; const rangeHeader = req.headers.range as string | undefined; - console.log( + logger.debug( `[Audiobook Stream] Fetching stream for ${id}, range: ${ rangeHeader || "none" }` @@ -609,7 +643,7 @@ router.get("/:id/stream", requireAuthOrToken, async (req, res) => { const { stream, headers, status } = await audiobookshelfService.streamAudiobook(id, rangeHeader); - console.log( + logger.debug( `[Audiobook Stream] Got stream, status: ${status}, content-type: ${headers["content-type"]}` ); @@ -645,7 +679,7 @@ router.get("/:id/stream", requireAuthOrToken, async (req, res) => { stream.pipe(res); stream.on("error", (error: any) => { - console.error("[Audiobook Stream] Stream error:", error); + logger.error("[Audiobook Stream] Stream error:", error); if (!res.headersSent) { res.status(500).json({ error: "Failed to stream audiobook", @@ -656,7 +690,7 @@ router.get("/:id/stream", requireAuthOrToken, async (req, res) => { } }); } catch (error: any) { - console.error("[Audiobook Stream] Error:", error.message); + logger.error("[Audiobook Stream] Error:", error.message); res.status(500).json({ error: "Failed to stream audiobook", message: error.message, @@ -704,30 +738,30 @@ router.post( ? 
Math.max(rawDuration, 0) : 0; - console.log(`\n [AUDIOBOOK PROGRESS] Received update:`); - console.log(` User: ${req.user!.username}`); - console.log(` Audiobook ID: ${id}`); - console.log( + logger.debug(`\n [AUDIOBOOK PROGRESS] Received update:`); + logger.debug(` User: ${req.user!.username}`); + logger.debug(` Audiobook ID: ${id}`); + logger.debug( ` Current Time: ${currentTime}s (${Math.floor( currentTime / 60 )} mins)` ); - console.log( + logger.debug( ` Duration: ${durationValue}s (${Math.floor( durationValue / 60 )} mins)` ); if (durationValue > 0) { - console.log( + logger.debug( ` Progress: ${( (currentTime / durationValue) * 100 ).toFixed(1)}%` ); } else { - console.log(" Progress: duration unknown"); + logger.debug(" Progress: duration unknown"); } - console.log(` Finished: ${!!isFinished}`); + logger.debug(` Finished: ${!!isFinished}`); // Pull cached metadata to avoid hitting Audiobookshelf for every update const [cachedAudiobook, existingProgress] = await Promise.all([ @@ -799,7 +833,7 @@ router.post( }, }); - console.log(` Progress saved to database`); + logger.debug(` Progress saved to database`); // Also update progress in Audiobookshelf try { @@ -809,9 +843,9 @@ router.post( fallbackDuration, isFinished ); - console.log(` Progress synced to Audiobookshelf`); + logger.debug(` Progress synced to Audiobookshelf`); } catch (error) { - console.error( + logger.error( "Failed to sync progress to Audiobookshelf:", error ); @@ -830,7 +864,7 @@ router.post( }, }); } catch (error: any) { - console.error("Error updating progress:", error); + logger.error("Error updating progress:", error); res.status(500).json({ error: "Failed to update progress", message: error.message, @@ -864,9 +898,9 @@ router.delete( const { id } = req.params; - console.log(`\n[AUDIOBOOK PROGRESS] Removing progress:`); - console.log(` User: ${req.user!.username}`); - console.log(` Audiobook ID: ${id}`); + logger.debug(`\n[AUDIOBOOK PROGRESS] Removing progress:`); + logger.debug(` User: 
${req.user!.username}`); + logger.debug(` Audiobook ID: ${id}`); // Delete progress from our database await prisma.audiobookProgress.deleteMany({ @@ -876,14 +910,14 @@ router.delete( }, }); - console.log(` Progress removed from database`); + logger.debug(` Progress removed from database`); // Also remove progress from Audiobookshelf try { await audiobookshelfService.updateProgress(id, 0, 0, false); - console.log(` Progress reset in Audiobookshelf`); + logger.debug(` Progress reset in Audiobookshelf`); } catch (error) { - console.error( + logger.error( "Failed to reset progress in Audiobookshelf:", error ); @@ -895,7 +929,7 @@ router.delete( message: "Progress removed", }); } catch (error: any) { - console.error("Error removing progress:", error); + logger.error("Error removing progress:", error); res.status(500).json({ error: "Failed to remove progress", message: error.message, diff --git a/backend/src/routes/auth.ts b/backend/src/routes/auth.ts index 1463d34..9ea4c1b 100644 --- a/backend/src/routes/auth.ts +++ b/backend/src/routes/auth.ts @@ -1,11 +1,13 @@ import { Router } from "express"; +import { logger } from "../utils/logger"; import bcrypt from "bcrypt"; import { prisma } from "../utils/db"; import { z } from "zod"; import speakeasy from "speakeasy"; import QRCode from "qrcode"; import crypto from "crypto"; -import { requireAuth, requireAdmin, generateToken } from "../middleware/auth"; +import jwt from "jsonwebtoken"; +import { requireAuth, requireAdmin, generateToken, generateRefreshToken } from "../middleware/auth"; import { encrypt, decrypt } from "../utils/encryption"; const router = Router(); @@ -119,15 +121,21 @@ router.post("/login", async (req, res) => { } } - // Generate JWT token + // Generate JWT tokens const jwtToken = generateToken({ id: user.id, username: user.username, role: user.role, + tokenVersion: user.tokenVersion, + }); + const refreshToken = generateRefreshToken({ + id: user.id, + tokenVersion: user.tokenVersion, }); res.json({ token: 
jwtToken, + refreshToken: refreshToken, user: { id: user.id, username: user.username, @@ -138,7 +146,7 @@ router.post("/login", async (req, res) => { if (err instanceof z.ZodError) { return res.status(400).json({ error: "Invalid request", details: err.errors }); } - console.error("Login error:", err); + logger.error("Login error:", err); res.status(500).json({ error: "Internal error" }); } }); @@ -150,6 +158,47 @@ router.post("/logout", (req, res) => { res.json({ message: "Logged out" }); }); +// POST /auth/refresh - Refresh access token using refresh token +router.post("/refresh", async (req, res) => { + const { refreshToken } = req.body; + + if (!refreshToken) { + return res.status(400).json({ error: "Refresh token required" }); + } + + try { + const decoded = jwt.verify(refreshToken, process.env.JWT_SECRET || process.env.SESSION_SECRET!) as any; + + if (decoded.type !== "refresh") { + return res.status(401).json({ error: "Invalid refresh token" }); + } + + const user = await prisma.user.findUnique({ + where: { id: decoded.userId }, + select: { id: true, username: true, role: true, tokenVersion: true } + }); + + if (!user) { + return res.status(401).json({ error: "User not found" }); + } + + // Validate tokenVersion + if (decoded.tokenVersion !== user.tokenVersion) { + return res.status(401).json({ error: "Token invalidated" }); + } + + const newAccessToken = generateToken(user); + const newRefreshToken = generateRefreshToken(user); + + return res.json({ + token: newAccessToken, + refreshToken: newRefreshToken + }); + } catch (error) { + return res.status(401).json({ error: "Invalid refresh token" }); + } +}); + /** * @openapi * /auth/me: @@ -226,16 +275,19 @@ router.post("/change-password", requireAuth, async (req, res) => { .json({ error: "Current password is incorrect" }); } - // Update password + // Update password and increment tokenVersion to invalidate all existing tokens const newPasswordHash = await bcrypt.hash(newPassword, 10); await 
prisma.user.update({ where: { id: req.user!.id }, - data: { passwordHash: newPasswordHash }, + data: { + passwordHash: newPasswordHash, + tokenVersion: { increment: 1 } + }, }); res.json({ message: "Password changed successfully" }); } catch (error) { - console.error("Change password error:", error); + logger.error("Change password error:", error); res.status(500).json({ error: "Failed to change password" }); } }); @@ -256,7 +308,7 @@ router.get("/users", requireAuth, requireAdmin, async (req, res) => { res.json(users); } catch (error) { - console.error("Get users error:", error); + logger.error("Get users error:", error); res.status(500).json({ error: "Failed to get users" }); } }); @@ -320,7 +372,7 @@ router.post("/create-user", requireAuth, requireAdmin, async (req, res) => { createdAt: user.createdAt, }); } catch (error) { - console.error("Create user error:", error); + logger.error("Create user error:", error); res.status(500).json({ error: "Failed to create user" }); } }); @@ -344,7 +396,7 @@ router.delete("/users/:id", requireAuth, requireAdmin, async (req, res) => { res.json({ message: "User deleted successfully" }); } catch (error: any) { - console.error("Delete user error:", error); + logger.error("Delete user error:", error); if (error.code === "P2025") { return res.status(404).json({ error: "User not found" }); } @@ -382,7 +434,7 @@ router.post("/2fa/setup", requireAuth, async (req, res) => { qrCode: qrCodeDataUrl, }); } catch (error) { - console.error("2FA setup error:", error); + logger.error("2FA setup error:", error); res.status(500).json({ error: "Failed to setup 2FA" }); } }); @@ -448,7 +500,7 @@ router.post("/2fa/enable", requireAuth, async (req, res) => { recoveryCodes: recoveryCodes, }); } catch (error) { - console.error("2FA enable error:", error); + logger.error("2FA enable error:", error); res.status(500).json({ error: "Failed to enable 2FA" }); } }); @@ -505,7 +557,7 @@ router.post("/2fa/disable", requireAuth, async (req, res) => { 
res.json({ message: "2FA disabled successfully" }); } catch (error) { - console.error("2FA disable error:", error); + logger.error("2FA disable error:", error); res.status(500).json({ error: "Failed to disable 2FA" }); } }); @@ -524,7 +576,7 @@ router.get("/2fa/status", requireAuth, async (req, res) => { res.json({ enabled: user.twoFactorEnabled }); } catch (error) { - console.error("2FA status error:", error); + logger.error("2FA status error:", error); res.status(500).json({ error: "Failed to get 2FA status" }); } }); diff --git a/backend/src/routes/browse.ts b/backend/src/routes/browse.ts index 012baf2..dd87fb7 100644 --- a/backend/src/routes/browse.ts +++ b/backend/src/routes/browse.ts @@ -1,4 +1,5 @@ import { Router } from "express"; +import { logger } from "../utils/logger"; import { requireAuthOrToken } from "../middleware/auth"; import { spotifyService } from "../services/spotify"; import { deezerService, DeezerPlaylistPreview, DeezerRadioStation } from "../services/deezer"; @@ -68,10 +69,10 @@ function deezerRadioToUnified(radio: DeezerRadioStation): PlaylistPreview { router.get("/playlists/featured", async (req, res) => { try { const limit = Math.min(parseInt(req.query.limit as string) || 50, 200); - console.log(`[Browse] Fetching featured playlists (limit: ${limit})...`); + logger.debug(`[Browse] Fetching featured playlists (limit: ${limit})...`); const playlists = await deezerService.getFeaturedPlaylists(limit); - console.log(`[Browse] Got ${playlists.length} Deezer playlists`); + logger.debug(`[Browse] Got ${playlists.length} Deezer playlists`); res.json({ playlists: playlists.map(deezerPlaylistToUnified), @@ -79,7 +80,7 @@ router.get("/playlists/featured", async (req, res) => { source: "deezer", }); } catch (error: any) { - console.error("Browse featured playlists error:", error); + logger.error("Browse featured playlists error:", error); res.status(500).json({ error: error.message || "Failed to fetch playlists" }); } }); @@ -96,10 +97,10 @@ 
router.get("/playlists/search", async (req, res) => { } const limit = Math.min(parseInt(req.query.limit as string) || 50, 100); - console.log(`[Browse] Searching playlists for "${query}"...`); + logger.debug(`[Browse] Searching playlists for "${query}"...`); const playlists = await deezerService.searchPlaylists(query, limit); - console.log(`[Browse] Search "${query}": ${playlists.length} results`); + logger.debug(`[Browse] Search "${query}": ${playlists.length} results`); res.json({ playlists: playlists.map(deezerPlaylistToUnified), @@ -108,7 +109,7 @@ router.get("/playlists/search", async (req, res) => { source: "deezer", }); } catch (error: any) { - console.error("Browse search playlists error:", error); + logger.error("Browse search playlists error:", error); res.status(500).json({ error: error.message || "Failed to search playlists" }); } }); @@ -132,7 +133,7 @@ router.get("/playlists/:id", async (req, res) => { url: `https://www.deezer.com/playlist/${id}`, }); } catch (error: any) { - console.error("Playlist fetch error:", error); + logger.error("Playlist fetch error:", error); res.status(500).json({ error: error.message || "Failed to fetch playlist" }); } }); @@ -147,7 +148,7 @@ router.get("/playlists/:id", async (req, res) => { */ router.get("/radios", async (req, res) => { try { - console.log("[Browse] Fetching radio stations..."); + logger.debug("[Browse] Fetching radio stations..."); const radios = await deezerService.getRadioStations(); res.json({ @@ -156,7 +157,7 @@ router.get("/radios", async (req, res) => { source: "deezer", }); } catch (error: any) { - console.error("Browse radios error:", error); + logger.error("Browse radios error:", error); res.status(500).json({ error: error.message || "Failed to fetch radios" }); } }); @@ -167,7 +168,7 @@ router.get("/radios", async (req, res) => { */ router.get("/radios/by-genre", async (req, res) => { try { - console.log("[Browse] Fetching radios by genre..."); + logger.debug("[Browse] Fetching radios by 
genre..."); const genresWithRadios = await deezerService.getRadiosByGenre(); // Transform to include unified format @@ -183,7 +184,7 @@ router.get("/radios/by-genre", async (req, res) => { source: "deezer", }); } catch (error: any) { - console.error("Browse radios by genre error:", error); + logger.error("Browse radios by genre error:", error); res.status(500).json({ error: error.message || "Failed to fetch radios" }); } }); @@ -195,7 +196,7 @@ router.get("/radios/by-genre", async (req, res) => { router.get("/radios/:id", async (req, res) => { try { const { id } = req.params; - console.log(`[Browse] Fetching radio ${id} tracks...`); + logger.debug(`[Browse] Fetching radio ${id} tracks...`); const radioPlaylist = await deezerService.getRadioTracks(id); @@ -209,7 +210,7 @@ router.get("/radios/:id", async (req, res) => { type: "radio", }); } catch (error: any) { - console.error("Radio tracks error:", error); + logger.error("Radio tracks error:", error); res.status(500).json({ error: error.message || "Failed to fetch radio tracks" }); } }); @@ -224,7 +225,7 @@ router.get("/radios/:id", async (req, res) => { */ router.get("/genres", async (req, res) => { try { - console.log("[Browse] Fetching genres..."); + logger.debug("[Browse] Fetching genres..."); const genres = await deezerService.getGenres(); res.json({ @@ -233,7 +234,7 @@ router.get("/genres", async (req, res) => { source: "deezer", }); } catch (error: any) { - console.error("Browse genres error:", error); + logger.error("Browse genres error:", error); res.status(500).json({ error: error.message || "Failed to fetch genres" }); } }); @@ -249,7 +250,7 @@ router.get("/genres/:id", async (req, res) => { return res.status(400).json({ error: "Invalid genre ID" }); } - console.log(`[Browse] Fetching content for genre ${genreId}...`); + logger.debug(`[Browse] Fetching content for genre ${genreId}...`); const content = await deezerService.getEditorialContent(genreId); res.json({ @@ -259,7 +260,7 @@ 
router.get("/genres/:id", async (req, res) => { source: "deezer", }); } catch (error: any) { - console.error("Genre content error:", error); + logger.error("Genre content error:", error); res.status(500).json({ error: error.message || "Failed to fetch genre content" }); } }); @@ -290,7 +291,7 @@ router.get("/genres/:id/playlists", async (req, res) => { source: "deezer", }); } catch (error: any) { - console.error("Genre playlists error:", error); + logger.error("Genre playlists error:", error); res.status(500).json({ error: error.message || "Failed to fetch genre playlists" }); } }); @@ -337,7 +338,7 @@ router.post("/playlists/parse", async (req, res) => { error: "Invalid or unsupported URL. Please provide a Spotify or Deezer playlist URL." }); } catch (error: any) { - console.error("Parse URL error:", error); + logger.error("Parse URL error:", error); res.status(500).json({ error: error.message || "Failed to parse URL" }); } }); @@ -353,7 +354,7 @@ router.post("/playlists/parse", async (req, res) => { */ router.get("/all", async (req, res) => { try { - console.log("[Browse] Fetching browse content (playlists + genres)..."); + logger.debug("[Browse] Fetching browse content (playlists + genres)..."); // Only fetch playlists and genres - radios are now internal library-based const [playlists, genres] = await Promise.all([ @@ -369,7 +370,7 @@ router.get("/all", async (req, res) => { source: "deezer", }); } catch (error: any) { - console.error("Browse all error:", error); + logger.error("Browse all error:", error); res.status(500).json({ error: error.message || "Failed to fetch browse content" }); } }); diff --git a/backend/src/routes/deviceLink.ts b/backend/src/routes/deviceLink.ts index 85a45af..f3027fd 100644 --- a/backend/src/routes/deviceLink.ts +++ b/backend/src/routes/deviceLink.ts @@ -1,4 +1,5 @@ import { Router } from "express"; +import { logger } from "../utils/logger"; import { requireAuthOrToken } from "../middleware/auth"; import { prisma } from 
"../utils/db"; import crypto from "crypto"; @@ -64,7 +65,7 @@ router.post("/generate", requireAuthOrToken, async (req, res) => { expiresIn: 300, // 5 minutes in seconds }); } catch (error) { - console.error("Generate device link code error:", error); + logger.error("Generate device link code error:", error); res.status(500).json({ error: "Failed to generate device link code" }); } }); @@ -123,7 +124,7 @@ router.post("/verify", async (req, res) => { username: linkCode.user.username, }); } catch (error) { - console.error("Verify device link code error:", error); + logger.error("Verify device link code error:", error); res.status(500).json({ error: "Failed to verify device link code" }); } }); @@ -161,7 +162,7 @@ router.get("/status/:code", async (req, res) => { expiresAt: linkCode.expiresAt, }); } catch (error) { - console.error("Check device link status error:", error); + logger.error("Check device link status error:", error); res.status(500).json({ error: "Failed to check status" }); } }); @@ -184,7 +185,7 @@ router.get("/devices", requireAuthOrToken, async (req, res) => { res.json(apiKeys); } catch (error) { - console.error("Get devices error:", error); + logger.error("Get devices error:", error); res.status(500).json({ error: "Failed to get devices" }); } }); @@ -209,7 +210,7 @@ router.delete("/devices/:id", requireAuthOrToken, async (req, res) => { res.json({ success: true }); } catch (error) { - console.error("Revoke device error:", error); + logger.error("Revoke device error:", error); res.status(500).json({ error: "Failed to revoke device" }); } }); diff --git a/backend/src/routes/discover.ts b/backend/src/routes/discover.ts index 145717b..094de7d 100644 --- a/backend/src/routes/discover.ts +++ b/backend/src/routes/discover.ts @@ -1,4 +1,5 @@ import { Router } from "express"; +import { logger } from "../utils/logger"; import { requireAuthOrToken } from "../middleware/auth"; import { prisma } from "../utils/db"; import { lastFmService } from 
"../services/lastfm"; @@ -46,10 +47,17 @@ router.get("/batch-status", async (req, res) => { }); } - const completedJobs = activeBatch.jobs.filter(j => j.status === "completed").length; - const failedJobs = activeBatch.jobs.filter(j => j.status === "failed" || j.status === "exhausted").length; + const completedJobs = activeBatch.jobs.filter( + (j) => j.status === "completed" + ).length; + const failedJobs = activeBatch.jobs.filter( + (j) => j.status === "failed" || j.status === "exhausted" + ).length; const totalJobs = activeBatch.jobs.length; - const progress = totalJobs > 0 ? Math.round(((completedJobs + failedJobs) / totalJobs) * 100) : 0; + const progress = + totalJobs > 0 + ? Math.round(((completedJobs + failedJobs) / totalJobs) * 100) + : 0; res.json({ active: true, @@ -61,7 +69,7 @@ router.get("/batch-status", async (req, res) => { total: totalJobs, }); } catch (error) { - console.error("Get batch status error:", error); + logger.error("Get batch status error:", error); res.status(500).json({ error: "Failed to get batch status" }); } }); @@ -87,7 +95,7 @@ router.post("/generate", async (req, res) => { }); } - console.log(`\n Queuing Discover Weekly generation for user ${userId}`); + logger.debug(`\n Queuing Discover Weekly generation for user ${userId}`); // Add generation job to queue const job = await discoverQueue.add({ userId }); @@ -97,7 +105,7 @@ router.post("/generate", async (req, res) => { jobId: job.id, }); } catch (error) { - console.error("Generate Discover Weekly error:", error); + logger.error("Generate Discover Weekly error:", error); res.status(500).json({ error: "Failed to start generation" }); } }); @@ -121,7 +129,7 @@ router.get("/generate/status/:jobId", async (req, res) => { result, }); } catch (error) { - console.error("Get generation status error:", error); + logger.error("Get generation status error:", error); res.status(500).json({ error: "Failed to get job status" }); } }); @@ -167,7 +175,7 @@ router.get("/current", async (req, res) 
=> { if (discoveryAlbum.tracks && discoveryAlbum.tracks.length > 0) { // Fetch all tracks in one query using their IDs const trackIds = discoveryAlbum.tracks - .map(dt => dt.trackId) + .map((dt) => dt.trackId) .filter((id): id is string => id !== null); if (trackIds.length > 0) { @@ -177,10 +185,14 @@ router.get("/current", async (req, res) => { }); // Create a map for quick lookup - const trackMap = new Map(libraryTracks.map(t => [t.id, t])); + const trackMap = new Map( + libraryTracks.map((t) => [t.id, t]) + ); for (const dt of discoveryAlbum.tracks) { - const track = dt.trackId ? trackMap.get(dt.trackId) : null; + const track = dt.trackId + ? trackMap.get(dt.trackId) + : null; if (track) { tracks.push({ id: track.id, @@ -194,14 +206,18 @@ router.get("/current", async (req, res) => { tier: discoveryAlbum.tier, coverUrl: track.album?.coverUrl, available: true, + duration: track.duration, }); } } } } - + // Fallback: No DiscoveryTrack records or no valid trackIds, find ONE track from library - if (tracks.filter(t => t.album === discoveryAlbum.albumTitle).length === 0) { + if ( + tracks.filter((t) => t.album === discoveryAlbum.albumTitle) + .length === 0 + ) { const album = await prisma.album.findFirst({ where: { title: discoveryAlbum.albumTitle, @@ -227,6 +243,7 @@ router.get("/current", async (req, res) => { tier: discoveryAlbum.tier, coverUrl: album.coverUrl, available: true, + duration: track.duration, }); } else { // Album not in library yet (downloading/pending) @@ -243,13 +260,14 @@ router.get("/current", async (req, res) => { coverUrl: null, available: false, isPending: true, + duration: 0, }); } } } // Get the list of successfully downloaded album MBIDs from discoveryAlbums - const successfulMbids = new Set(discoveryAlbums.map(da => da.rgMbid)); + const successfulMbids = new Set(discoveryAlbums.map((da) => da.rgMbid)); // Filter unavailable albums: // 1. 
Remove albums that successfully downloaded (have DiscoveryAlbum record) @@ -263,7 +281,8 @@ router.get("/current", async (req, res) => { // Skip if album exists in user's library by artist+title (normalized match) const normalizedArtist = album.artistName.toLowerCase().trim(); - const normalizedAlbum = album.albumTitle.toLowerCase() + const normalizedAlbum = album.albumTitle + .toLowerCase() .replace(/\(.*?\)/g, "") // Remove parenthetical content .replace(/\[.*?\]/g, "") // Remove bracketed content .trim(); @@ -273,11 +292,19 @@ router.get("/current", async (req, res) => { OR: [ { rgMbid: album.albumMbid }, { - title: { contains: normalizedAlbum, mode: "insensitive" }, - artist: { name: { contains: normalizedArtist, mode: "insensitive" } }, - } - ] - } + title: { + contains: normalizedAlbum, + mode: "insensitive", + }, + artist: { + name: { + contains: normalizedArtist, + mode: "insensitive", + }, + }, + }, + ], + }, }); if (existsInLibrary) { @@ -305,24 +332,24 @@ router.get("/current", async (req, res) => { })); try { - console.log(`\nDiscover Weekly API Response:`); - console.log(` Total tracks: ${tracks.length}`); - console.log(` Unavailable albums: ${unavailable.length}`); + logger.debug(`\nDiscover Weekly API Response:`); + logger.debug(` Total tracks: ${tracks.length}`); + logger.debug(` Unavailable albums: ${unavailable.length}`); if (unavailable.length > 0 && unavailable.length <= 20) { - console.log(` Unavailable albums with previews:`); + logger.debug(` Unavailable albums with previews:`); unavailable.slice(0, 5).forEach((album, i) => { - console.log( + logger.debug( ` ${i + 1}. ${album.artist} - ${album.album} [${ album.previewUrl ? "HAS PREVIEW" : "NO PREVIEW" }]` ); }); if (unavailable.length > 5) { - console.log(` ... and ${unavailable.length - 5} more`); + logger.debug(` ... 
and ${unavailable.length - 5} more`); } } } catch (err) { - console.error("Error logging discover response:", err); + logger.error("Error logging discover response:", err); } res.json({ @@ -334,7 +361,7 @@ router.get("/current", async (req, res) => { unavailableCount: unavailable.length, }); } catch (error) { - console.error("Get current Discover Weekly error:", error); + logger.error("Get current Discover Weekly error:", error); res.status(500).json({ error: "Failed to get Discover Weekly playlist", }); @@ -377,30 +404,46 @@ router.post("/like", async (req, res) => { // Remove discovery tag from the artist in Lidarr // This prevents the artist from being deleted during cleanup - console.log(` Removing discovery tag from artist: ${discoveryAlbum.artistName}`); - + logger.debug( + ` Removing discovery tag from artist: ${discoveryAlbum.artistName}` + ); + // If artistMbid is a temp ID, we need to search Lidarr by artist name instead - if (discoveryAlbum.artistMbid && !discoveryAlbum.artistMbid.startsWith("temp-")) { - await lidarrService.removeDiscoveryTagByMbid(discoveryAlbum.artistMbid); + if ( + discoveryAlbum.artistMbid && + !discoveryAlbum.artistMbid.startsWith("temp-") + ) { + await lidarrService.removeDiscoveryTagByMbid( + discoveryAlbum.artistMbid + ); } else { // Search Lidarr for the artist by name and remove tag try { const lidarrArtists = await lidarrService.getArtists(); const lidarrArtist = lidarrArtists.find( - a => a.artistName.toLowerCase() === discoveryAlbum.artistName.toLowerCase() + (a) => + a.artistName.toLowerCase() === + discoveryAlbum.artistName.toLowerCase() ); - + if (lidarrArtist) { const tagId = await lidarrService.getOrCreateDiscoveryTag(); if (tagId && lidarrArtist.tags?.includes(tagId)) { - await lidarrService.removeTagsFromArtist(lidarrArtist.id, [tagId]); - console.log(` Removed discovery tag from ${lidarrArtist.artistName} (found by name)`); + await lidarrService.removeTagsFromArtist( + lidarrArtist.id, + [tagId] + ); + logger.debug( 
+ ` Removed discovery tag from ${lidarrArtist.artistName} (found by name)` + ); } } else { - console.log(` Artist ${discoveryAlbum.artistName} not found in Lidarr (may have been removed)`); + logger.debug( + ` Artist ${discoveryAlbum.artistName} not found in Lidarr (may have been removed)` + ); } } catch (e: any) { - console.log(` Failed to remove discovery tag: ${e.message}`); + logger.debug(` Failed to remove discovery tag: ${e.message}`); } } @@ -411,8 +454,16 @@ router.post("/like", async (req, res) => { OR: [ { rgMbid: albumId }, { - title: { equals: discoveryAlbum.albumTitle, mode: "insensitive" }, - artist: { name: { equals: discoveryAlbum.artistName, mode: "insensitive" } }, + title: { + equals: discoveryAlbum.albumTitle, + mode: "insensitive", + }, + artist: { + name: { + equals: discoveryAlbum.artistName, + mode: "insensitive", + }, + }, }, ], }, @@ -443,9 +494,13 @@ router.post("/like", async (req, res) => { source: "discovery_liked", }, }); - console.log(` ✓ Added liked album to library: ${dbAlbum.artist.name} - ${dbAlbum.title} (matched from discovery)`); + logger.debug( + ` Added liked album to library: ${dbAlbum.artist.name} - ${dbAlbum.title} (matched from discovery)` + ); } else { - console.log(` [WARN] Could not find scanned album for: ${discoveryAlbum.artistName} - ${discoveryAlbum.albumTitle}`); + logger.debug( + ` [WARN] Could not find scanned album for: ${discoveryAlbum.artistName} - ${discoveryAlbum.albumTitle}` + ); } // Retroactively mark all plays from this album as DISCOVERY_KEPT @@ -474,7 +529,7 @@ router.post("/like", async (req, res) => { res.json({ success: true }); } catch (error) { - console.error("Like discovery album error:", error); + logger.error("Like discovery album error:", error); res.status(500).json({ error: "Failed to like album" }); } }); @@ -543,7 +598,7 @@ router.delete("/unlike", async (req, res) => { res.json({ success: true }); } catch (error) { - console.error("Unlike discovery album error:", error); + 
logger.error("Unlike discovery album error:", error); res.status(500).json({ error: "Failed to unlike album" }); } }); @@ -573,7 +628,7 @@ router.get("/config", async (req, res) => { res.json(config); } catch (error) { - console.error("Get Discover Weekly config error:", error); + logger.error("Get Discover Weekly config error:", error); res.status(500).json({ error: "Failed to get configuration" }); } }); @@ -582,7 +637,13 @@ router.get("/config", async (req, res) => { router.patch("/config", async (req, res) => { try { const userId = req.user!.id; - const { playlistSize, maxRetryAttempts, exclusionMonths, downloadRatio, enabled } = req.body; + const { + playlistSize, + maxRetryAttempts, + exclusionMonths, + downloadRatio, + enabled, + } = req.body; // Validate playlist size if (playlistSize !== undefined) { @@ -653,7 +714,7 @@ router.patch("/config", async (req, res) => { res.json(config); } catch (error) { - console.error("Update Discover Weekly config error:", error); + logger.error("Update Discover Weekly config error:", error); res.status(500).json({ error: "Failed to update configuration" }); } }); @@ -667,7 +728,7 @@ router.get("/popular-artists", async (req, res) => { res.json({ artists }); } catch (error: any) { - console.error( + logger.error( "[Discover] Get popular artists error:", error?.message || error ); @@ -681,7 +742,7 @@ router.delete("/clear", async (req, res) => { try { const userId = req.user!.id; - console.log(`\n Clearing Discover Weekly playlist for user ${userId}`); + logger.debug(`\n Clearing Discover Weekly playlist for user ${userId}`); // Get all discovery albums for this user const discoveryAlbums = await prisma.discoveryAlbum.findMany({ @@ -705,10 +766,10 @@ router.delete("/clear", async (req, res) => { (a) => a.status === "ACTIVE" ); - console.log( + logger.debug( ` Found ${likedAlbums.length} liked albums to move to library` ); - console.log(` Found ${activeAlbums.length} active albums to delete`); + logger.debug(` Found 
${activeAlbums.length} active albums to delete`); // Get system settings for Lidarr const settings = await getSystemSettings(); @@ -718,7 +779,7 @@ router.delete("/clear", async (req, res) => { // Process liked albums - move to library if (likedAlbums.length > 0) { - console.log(`\n[LIBRARY] Moving liked albums to library...`); + logger.debug(`\n[LIBRARY] Moving liked albums to library...`); for (const album of likedAlbums) { try { @@ -761,7 +822,6 @@ router.delete("/clear", async (req, res) => { settings.lidarrApiKey && album.lidarrAlbumId ) { - try { // Get album details from Lidarr const albumResponse = await axios.get( @@ -812,12 +872,12 @@ router.delete("/clear", async (req, res) => { timeout: 30000, } ); - console.log( + logger.debug( ` Moved to library: ${album.artistName} - ${album.albumTitle}` ); } } catch (lidarrError: any) { - console.log( + logger.debug( ` Lidarr move failed for ${album.albumTitle}: ${lidarrError.message}` ); } @@ -832,7 +892,7 @@ router.delete("/clear", async (req, res) => { data: { status: "MOVED" }, }); } catch (error: any) { - console.error( + logger.error( ` ✗ Failed to move ${album.albumTitle}: ${error.message}` ); } @@ -841,7 +901,7 @@ router.delete("/clear", async (req, res) => { // Process active (non-liked) albums - delete them if (activeAlbums.length > 0) { - console.log(`\n[CLEANUP] Deleting non-liked albums...`); + logger.debug(`\n[CLEANUP] Deleting non-liked albums...`); const checkedArtistIds = new Set(); @@ -854,7 +914,6 @@ router.delete("/clear", async (req, res) => { settings.lidarrApiKey && album.lidarrAlbumId ) { - try { // Get album details to find artist ID let artistId: number | undefined; @@ -884,7 +943,7 @@ router.delete("/clear", async (req, res) => { timeout: 10000, } ); - console.log( + logger.debug( ` Deleted from Lidarr: ${album.albumTitle}` ); @@ -944,11 +1003,11 @@ router.delete("/clear", async (req, res) => { timeout: 10000, } ); - console.log( + logger.debug( ` Removed artist from Lidarr: 
${artist.artistName}` ); } else { - console.log( + logger.debug( ` Keeping artist in Lidarr: ${artist.artistName} (has library or kept albums)` ); } @@ -958,7 +1017,7 @@ router.delete("/clear", async (req, res) => { } } catch (lidarrError: any) { if (lidarrError.response?.status !== 404) { - console.log( + logger.debug( ` Lidarr delete failed for ${album.albumTitle}: ${lidarrError.message}` ); } @@ -968,23 +1027,38 @@ router.delete("/clear", async (req, res) => { // FALLBACK: Direct filesystem deletion (in case Lidarr's deleteFiles didn't work) // Try to delete files directly from the discovery folder try { - const discoveryPath = path.join(config.music.musicPath, "discovery"); + const discoveryPath = path.join( + config.music.musicPath, + "discovery" + ); // Try common folder structures: /discovery/Artist/Album or /discovery/Artist - Album const possiblePaths = [ - path.join(discoveryPath, album.artistName, album.albumTitle), + path.join( + discoveryPath, + album.artistName, + album.albumTitle + ), path.join(discoveryPath, album.artistName), - path.join(discoveryPath, `${album.artistName} - ${album.albumTitle}`), + path.join( + discoveryPath, + `${album.artistName} - ${album.albumTitle}` + ), ]; for (const albumPath of possiblePaths) { if (fs.existsSync(albumPath)) { - fs.rmSync(albumPath, { recursive: true, force: true }); - console.log(` Direct deleted: ${albumPath}`); + fs.rmSync(albumPath, { + recursive: true, + force: true, + }); + logger.debug(` Direct deleted: ${albumPath}`); break; // Stop after first successful delete } } } catch (fsError: any) { - console.log(` Filesystem delete failed for ${album.albumTitle}: ${fsError.message}`); + logger.debug( + ` Filesystem delete failed for ${album.albumTitle}: ${fsError.message}` + ); } // Delete DiscoveryTrack records first (foreign key to Track) @@ -1022,7 +1096,7 @@ router.delete("/clear", async (req, res) => { activeDeleted++; } catch (error: any) { - console.error( + logger.error( ` ✗ Failed to delete 
${album.albumTitle}: ${error.message}` ); } @@ -1032,7 +1106,11 @@ router.delete("/clear", async (req, res) => { // ALSO clean up "extra" downloaded albums that didn't make the final playlist // These are in DownloadJob but not in DiscoveryAlbum // IMPORTANT: Skip any albums where the artist has LIKED content (even if MBID doesn't match) - if (settings.lidarrEnabled && settings.lidarrUrl && settings.lidarrApiKey) { + if ( + settings.lidarrEnabled && + settings.lidarrUrl && + settings.lidarrApiKey + ) { const completedJobs = await prisma.downloadJob.findMany({ where: { userId, @@ -1044,56 +1122,79 @@ router.delete("/clear", async (req, res) => { // Get all DiscoveryAlbum for this user (including ones we just processed) const allDiscoveryAlbums = await prisma.discoveryAlbum.findMany({ where: { userId }, - select: { rgMbid: true, artistName: true, albumTitle: true, status: true }, + select: { + rgMbid: true, + artistName: true, + albumTitle: true, + status: true, + }, }); - const discoveryMbids = new Set(allDiscoveryAlbums.map(da => da.rgMbid)); - + const discoveryMbids = new Set( + allDiscoveryAlbums.map((da) => da.rgMbid) + ); + // Build a set of liked artist names (case-insensitive) for extra protection const likedArtistNames = new Set( allDiscoveryAlbums - .filter(da => da.status === "LIKED" || da.status === "MOVED") - .map(da => da.artistName.toLowerCase()) + .filter( + (da) => da.status === "LIKED" || da.status === "MOVED" + ) + .map((da) => da.artistName.toLowerCase()) ); // Find completed jobs that didn't make the playlist AND aren't from liked artists - const extraJobs = completedJobs.filter(job => { + const extraJobs = completedJobs.filter((job) => { // If MBID matches a discovery album, not an "extra" if (discoveryMbids.has(job.targetMbid)) return false; - + // If this job's artist has any LIKED albums, don't clean it up const metadata = job.metadata as any; const artistName = metadata?.artistName?.toLowerCase(); if (artistName && 
likedArtistNames.has(artistName)) { - console.log(` Skipping ${metadata?.albumTitle} - artist ${metadata?.artistName} has liked albums`); + logger.debug( + ` Skipping ${metadata?.albumTitle} - artist ${metadata?.artistName} has liked albums` + ); return false; } - + return true; }); if (extraJobs.length > 0) { - console.log(`\n[CLEANUP] Found ${extraJobs.length} extra albums to clean from Lidarr...`); - + logger.debug( + `\n[CLEANUP] Found ${extraJobs.length} extra albums to clean from Lidarr...` + ); + for (const job of extraJobs) { const metadata = job.metadata as any; const albumTitle = metadata?.albumTitle || job.subject; const artistName = metadata?.artistName; - + // Double-check: also check by artist name + album title for LIKED status - const isLikedByName = await prisma.discoveryAlbum.findFirst({ - where: { - userId, - artistName: { equals: artistName, mode: "insensitive" }, - albumTitle: { equals: albumTitle, mode: "insensitive" }, - status: { in: ["LIKED", "MOVED"] }, - }, - }); - + const isLikedByName = await prisma.discoveryAlbum.findFirst( + { + where: { + userId, + artistName: { + equals: artistName, + mode: "insensitive", + }, + albumTitle: { + equals: albumTitle, + mode: "insensitive", + }, + status: { in: ["LIKED", "MOVED"] }, + }, + } + ); + if (isLikedByName) { - console.log(` Skipping ${albumTitle} - marked as LIKED`); + logger.debug( + ` Skipping ${albumTitle} - marked as LIKED` + ); continue; } - + if (job.lidarrAlbumId) { try { // Get artist ID before deleting album @@ -1102,7 +1203,9 @@ router.delete("/clear", async (req, res) => { const albumResponse = await axios.get( `${settings.lidarrUrl}/api/v1/album/${job.lidarrAlbumId}`, { - headers: { "X-Api-Key": settings.lidarrApiKey }, + headers: { + "X-Api-Key": settings.lidarrApiKey, + }, timeout: 10000, } ); @@ -1116,48 +1219,69 @@ router.delete("/clear", async (req, res) => { `${settings.lidarrUrl}/api/v1/album/${job.lidarrAlbumId}`, { params: { deleteFiles: true }, - headers: { "X-Api-Key": 
settings.lidarrApiKey }, + headers: { + "X-Api-Key": settings.lidarrApiKey, + }, timeout: 10000, } ); - console.log(` Cleaned up extra album: ${albumTitle}`); + logger.debug( + ` Cleaned up extra album: ${albumTitle}` + ); // Check if artist should be removed too if (artistId) { // Check if artist has any liked albums by NAME (more reliable than MBID) - const hasLikedByArtistName = await prisma.discoveryAlbum.findFirst({ - where: { - artistName: { equals: artistName, mode: "insensitive" }, - status: { in: ["LIKED", "MOVED"] }, - }, - }); + const hasLikedByArtistName = + await prisma.discoveryAlbum.findFirst({ + where: { + artistName: { + equals: artistName, + mode: "insensitive", + }, + status: { in: ["LIKED", "MOVED"] }, + }, + }); if (hasLikedByArtistName) { - console.log(` Keeping artist: ${artistName} (has liked albums)`); + logger.debug( + ` Keeping artist: ${artistName} (has liked albums)` + ); continue; } const artistMbid = metadata?.artistMbid; - if (artistMbid && !artistMbid.startsWith("temp-")) { + if ( + artistMbid && + !artistMbid.startsWith("temp-") + ) { // Check if artist has native library content - const hasNativeLibrary = await prisma.ownedAlbum.findFirst({ - where: { - artist: { mbid: artistMbid }, - source: "native_scan", - }, - }); + const hasNativeLibrary = + await prisma.ownedAlbum.findFirst({ + where: { + artist: { mbid: artistMbid }, + source: "native_scan", + }, + }); if (!hasNativeLibrary) { try { await axios.delete( `${settings.lidarrUrl}/api/v1/artist/${artistId}`, { - params: { deleteFiles: true }, - headers: { "X-Api-Key": settings.lidarrApiKey }, + params: { + deleteFiles: true, + }, + headers: { + "X-Api-Key": + settings.lidarrApiKey, + }, timeout: 10000, } ); - console.log(` Removed extra artist from Lidarr: ${artistName}`); + logger.debug( + ` Removed extra artist from Lidarr: ${artistName}` + ); } catch (e) { // Artist might have other albums } @@ -1167,7 +1291,9 @@ router.delete("/clear", async (req, res) => { } catch (e: any) { 
// Ignore - might already be removed if (e.response?.status !== 404) { - console.log(` Failed to clean up ${albumTitle}: ${e.message}`); + logger.debug( + ` Failed to clean up ${albumTitle}: ${e.message}` + ); } } } @@ -1182,9 +1308,15 @@ router.delete("/clear", async (req, res) => { // === PHASE 1.5: Clean up failed artists from Lidarr === // Get all failed download jobs for this user and remove their artists from Lidarr - if (settings.lidarrEnabled && settings.lidarrUrl && settings.lidarrApiKey) { - console.log(`\n[CLEANUP] Checking for failed artists to remove from Lidarr...`); - + if ( + settings.lidarrEnabled && + settings.lidarrUrl && + settings.lidarrApiKey + ) { + logger.debug( + `\n[CLEANUP] Checking for failed artists to remove from Lidarr...` + ); + const failedJobs = await prisma.downloadJob.findMany({ where: { userId, @@ -1196,12 +1328,15 @@ router.delete("/clear", async (req, res) => { // Group by artist const failedArtistMbids = new Set(); const artistNames = new Map(); - + for (const job of failedJobs) { const metadata = job.metadata as any; if (metadata?.artistMbid) { failedArtistMbids.add(metadata.artistMbid); - artistNames.set(metadata.artistMbid, metadata.artistName || "Unknown"); + artistNames.set( + metadata.artistMbid, + metadata.artistName || "Unknown" + ); } } @@ -1209,28 +1344,38 @@ router.delete("/clear", async (req, res) => { for (const artistMbid of failedArtistMbids) { try { // Check if artist has any NATIVE library content (real user library) - const hasNativeOwnedAlbums = await prisma.ownedAlbum.findFirst({ - where: { - artist: { mbid: artistMbid }, - source: "native_scan", - }, - }); + const hasNativeOwnedAlbums = + await prisma.ownedAlbum.findFirst({ + where: { + artist: { mbid: artistMbid }, + source: "native_scan", + }, + }); if (hasNativeOwnedAlbums) { - console.log(` Keeping ${artistNames.get(artistMbid)} - has native library content`); + logger.debug( + ` Keeping ${artistNames.get( + artistMbid + )} - has native library 
content` + ); continue; } // Check if artist has any LIKED discovery albums - const hasLikedDiscovery = await prisma.discoveryAlbum.findFirst({ - where: { - artistMbid, - status: { in: ["LIKED", "MOVED"] }, - }, - }); + const hasLikedDiscovery = + await prisma.discoveryAlbum.findFirst({ + where: { + artistMbid, + status: { in: ["LIKED", "MOVED"] }, + }, + }); if (hasLikedDiscovery) { - console.log(` Keeping ${artistNames.get(artistMbid)} - has liked discovery albums`); + logger.debug( + ` Keeping ${artistNames.get( + artistMbid + )} - has liked discovery albums` + ); continue; } @@ -1256,7 +1401,11 @@ router.delete("/clear", async (req, res) => { timeout: 10000, } ); - console.log(` ✓ Removed failed artist from Lidarr: ${artistNames.get(artistMbid)}`); + logger.debug( + ` Removed failed artist from Lidarr: ${artistNames.get( + artistMbid + )}` + ); } } catch (e: any) { // Ignore errors - artist might already be removed @@ -1278,7 +1427,7 @@ router.delete("/clear", async (req, res) => { // === PHASE 2: Clean up orphaned discovery records === // These are Album/Track records with location="DISCOVER" that weren't linked to a DiscoveryAlbum // This can happen if downloads failed or playlist build failed - console.log(`\n Cleaning up orphaned discovery records...`); + logger.debug(`\n Cleaning up orphaned discovery records...`); // Find all DISCOVER albums that don't have a corresponding DiscoveryAlbum record const orphanedAlbums = await prisma.album.findMany({ @@ -1304,7 +1453,7 @@ router.delete("/clear", async (req, res) => { status: { in: ["ACTIVE", "LIKED", "MOVED"] }, // Keep if active, liked, or moved to library }, }); - + // Also check if there's an OwnedAlbum record (user liked it) const hasOwnedRecord = await prisma.ownedAlbum.findFirst({ where: { @@ -1322,14 +1471,14 @@ router.delete("/clear", async (req, res) => { where: { id: orphanAlbum.id }, }); orphanedAlbumsDeleted++; - console.log( + logger.debug( ` Deleted orphaned album: ${orphanAlbum.artist.name} - 
${orphanAlbum.title}` ); } } if (orphanedAlbumsDeleted > 0) { - console.log( + logger.debug( ` Cleaned up ${orphanedAlbumsDeleted} orphaned discovery albums` ); } @@ -1343,7 +1492,7 @@ router.delete("/clear", async (req, res) => { if (orphanedArtists.length > 0) { const orphanIds = orphanedArtists.map((a) => a.id); - + // Delete artist relations first (SimilarArtist records) // Note: SimilarArtist uses fromArtistId/toArtistId field names await prisma.similarArtist.deleteMany({ @@ -1358,7 +1507,7 @@ router.delete("/clear", async (req, res) => { await prisma.artist.deleteMany({ where: { id: { in: orphanIds } }, }); - console.log( + logger.debug( ` Cleaned up ${orphanedArtists.length} orphaned artists` ); } @@ -1371,7 +1520,7 @@ router.delete("/clear", async (req, res) => { }); if (orphanedDiscoveryTracks.count > 0) { - console.log( + logger.debug( ` Cleaned up ${orphanedDiscoveryTracks.count} orphaned discovery track records` ); } @@ -1389,7 +1538,7 @@ router.delete("/clear", async (req, res) => { }); if (oldDiscoveryAlbums.count > 0) { - console.log( + logger.debug( ` Cleaned up ${oldDiscoveryAlbums.count} old discovery album records` ); } @@ -1399,68 +1548,89 @@ router.delete("/clear", async (req, res) => { // This is the ONLY reliable way to identify discovery artists // User's pre-existing library is NEVER touched (no tag = safe) let lidarrArtistsRemoved = 0; - if (settings.lidarrEnabled && settings.lidarrUrl && settings.lidarrApiKey) { - console.log(`\n[LIDARR CLEANUP] Tag-based cleanup (lidify-discovery tag)...`); - + if ( + settings.lidarrEnabled && + settings.lidarrUrl && + settings.lidarrApiKey + ) { + logger.debug( + `\n[LIDARR CLEANUP] Tag-based cleanup (lidify-discovery tag)...` + ); + try { // Get all artists with the discovery tag - const discoveryArtists = await lidarrService.getDiscoveryArtists(); - console.log(` Found ${discoveryArtists.length} artists with discovery tag`); - + const discoveryArtists = + await lidarrService.getDiscoveryArtists(); + 
logger.debug( + ` Found ${discoveryArtists.length} artists with discovery tag` + ); + for (const lidarrArtist of discoveryArtists) { const artistMbid = lidarrArtist.foreignArtistId; const artistName = lidarrArtist.artistName; - + if (!artistMbid) continue; - + // Double-check: if artist has LIKED albums, remove tag but don't delete // (This is a safety net - the like endpoint should have already removed the tag) - const hasKeptDiscovery = await prisma.discoveryAlbum.findFirst({ - where: { - artistMbid: artistMbid, - status: { in: ["LIKED", "MOVED"] }, - }, - }); - + const hasKeptDiscovery = + await prisma.discoveryAlbum.findFirst({ + where: { + artistMbid: artistMbid, + status: { in: ["LIKED", "MOVED"] }, + }, + }); + if (hasKeptDiscovery) { // Remove the tag but keep the artist - console.log(` Keeping ${artistName} - has liked albums (removing tag)`); - await lidarrService.removeDiscoveryTagByMbid(artistMbid); + logger.debug( + ` Keeping ${artistName} - has liked albums (removing tag)` + ); + await lidarrService.removeDiscoveryTagByMbid( + artistMbid + ); continue; } - + // Artist has discovery tag AND no liked albums = safe to delete try { - const result = await lidarrService.deleteArtistById(lidarrArtist.id, true); + const result = await lidarrService.deleteArtistById( + lidarrArtist.id, + true + ); if (result.success) { lidarrArtistsRemoved++; - console.log(` ✓ Removed: ${artistName}`); + logger.debug(` Removed: ${artistName}`); } } catch (deleteError: any) { - console.log(` ✗ Failed to remove ${artistName}: ${deleteError.message}`); + logger.debug( + ` Failed to remove ${artistName}: ${deleteError.message}` + ); } } - - console.log(` Tag-based cleanup complete: ${lidarrArtistsRemoved} artists removed`); + + logger.debug( + ` Tag-based cleanup complete: ${lidarrArtistsRemoved} artists removed` + ); } catch (lidarrError: any) { - console.log(` Lidarr cleanup failed: ${lidarrError.message}`); + logger.debug(` Lidarr cleanup failed: ${lidarrError.message}`); } } 
// === PHASE 4: Trigger library scan to sync database with filesystem === - console.log(`\n[SCAN] Triggering library scan to sync database...`); + logger.debug(`\n[SCAN] Triggering library scan to sync database...`); try { await scanQueue.add("scan", { userId, musicPath: config.music.musicPath, }); - console.log(` Library scan queued successfully`); + logger.debug(` Library scan queued successfully`); } catch (scanError: any) { - console.log(` Library scan queue failed: ${scanError.message}`); + logger.debug(` Library scan queue failed: ${scanError.message}`); // Non-fatal - continue with response } - console.log( + logger.debug( `\nClear complete: ${likedMoved} moved to library, ${activeDeleted} deleted, ${orphanedAlbumsDeleted} orphans cleaned, ${lidarrArtistsRemoved} Lidarr artists removed` ); @@ -1473,11 +1643,14 @@ router.delete("/clear", async (req, res) => { lidarrArtistsRemoved, }); } catch (error: any) { - console.error("Clear discovery playlist error:", error?.message || error); - console.error("Stack:", error?.stack); - res.status(500).json({ + logger.error( + "Clear discovery playlist error:", + error?.message || error + ); + logger.error("Stack:", error?.stack); + res.status(500).json({ error: "Failed to clear discovery playlist", - details: error?.message || "Unknown error" + details: error?.message || "Unknown error", }); } }); @@ -1488,9 +1661,9 @@ router.get("/exclusions", async (req, res) => { const userId = req.user!.id; const exclusions = await prisma.discoverExclusion.findMany({ - where: { + where: { userId, - expiresAt: { gt: new Date() } // Only active exclusions + expiresAt: { gt: new Date() }, // Only active exclusions }, orderBy: { lastSuggestedAt: "desc" }, }); @@ -1510,9 +1683,12 @@ router.get("/exclusions", async (req, res) => { count: exclusions.length, }); } catch (error: any) { - console.error("Get exclusions error:", error?.message || error); - console.error("Stack:", error?.stack); - res.status(500).json({ error: "Failed to get 
exclusions", details: error?.message }); + logger.error("Get exclusions error:", error?.message || error); + logger.error("Stack:", error?.stack); + res.status(500).json({ + error: "Failed to get exclusions", + details: error?.message, + }); } }); @@ -1525,7 +1701,9 @@ router.delete("/exclusions", async (req, res) => { where: { userId }, }); - console.log(`[Discovery] Cleared ${result.count} exclusions for user ${userId}`); + logger.debug( + `[Discovery] Cleared ${result.count} exclusions for user ${userId}` + ); res.json({ success: true, @@ -1533,7 +1711,7 @@ router.delete("/exclusions", async (req, res) => { clearedCount: result.count, }); } catch (error) { - console.error("Clear exclusions error:", error); + logger.error("Clear exclusions error:", error); res.status(500).json({ error: "Failed to clear exclusions" }); } }); @@ -1561,7 +1739,7 @@ router.delete("/exclusions/:id", async (req, res) => { message: "Exclusion removed", }); } catch (error) { - console.error("Remove exclusion error:", error); + logger.error("Remove exclusion error:", error); res.status(500).json({ error: "Failed to remove exclusion" }); } }); @@ -1570,14 +1748,20 @@ router.delete("/exclusions/:id", async (req, res) => { // This cleans up artists that were added for discovery but shouldn't remain router.post("/cleanup-lidarr", async (req, res) => { try { - console.log("\n[CLEANUP] Starting Lidarr cleanup of discovery-only artists..."); - + logger.debug( + "\n[CLEANUP] Starting Lidarr cleanup of discovery-only artists..." 
+ ); + const settings = await getSystemSettings(); - - if (!settings.lidarrEnabled || !settings.lidarrUrl || !settings.lidarrApiKey) { + + if ( + !settings.lidarrEnabled || + !settings.lidarrUrl || + !settings.lidarrApiKey + ) { return res.status(400).json({ error: "Lidarr not configured" }); } - + // Get all artists from Lidarr const lidarrResponse = await axios.get( `${settings.lidarrUrl}/api/v1/artist`, @@ -1586,20 +1770,22 @@ router.post("/cleanup-lidarr", async (req, res) => { timeout: 30000, } ); - + const lidarrArtists = lidarrResponse.data; - console.log(`[CLEANUP] Found ${lidarrArtists.length} artists in Lidarr`); - + logger.debug( + `[CLEANUP] Found ${lidarrArtists.length} artists in Lidarr` + ); + const artistsRemoved: string[] = []; const artistsKept: string[] = []; const errors: string[] = []; - + for (const lidarrArtist of lidarrArtists) { const artistMbid = lidarrArtist.foreignArtistId; const artistName = lidarrArtist.artistName; - + if (!artistMbid) continue; - + try { // Check if this artist has any NATIVE library content (real user library) // This is more reliable than checking Album.location which can be wrong @@ -1609,39 +1795,45 @@ router.post("/cleanup-lidarr", async (req, res) => { source: "native_scan", }, }); - + // Check if artist has any LIKED/MOVED discovery albums - const hasKeptDiscoveryAlbums = await prisma.discoveryAlbum.findFirst({ - where: { - artistMbid: artistMbid, - status: { in: ["LIKED", "MOVED"] }, - }, - }); - + const hasKeptDiscoveryAlbums = + await prisma.discoveryAlbum.findFirst({ + where: { + artistMbid: artistMbid, + status: { in: ["LIKED", "MOVED"] }, + }, + }); + // Check if artist has any ACTIVE discovery albums (current playlist) - const hasActiveDiscoveryAlbums = await prisma.discoveryAlbum.findFirst({ - where: { - artistMbid: artistMbid, - status: "ACTIVE", - }, - }); - + const hasActiveDiscoveryAlbums = + await prisma.discoveryAlbum.findFirst({ + where: { + artistMbid: artistMbid, + status: "ACTIVE", + }, + }); 
+ if (hasNativeOwnedAlbums || hasKeptDiscoveryAlbums) { // This artist should stay in Lidarr - artistsKept.push(`${artistName} (has native library or kept albums)`); + artistsKept.push( + `${artistName} (has native library or kept albums)` + ); continue; } - + if (hasActiveDiscoveryAlbums) { // This artist has a current discovery album, keep for now artistsKept.push(`${artistName} (has active discovery)`); continue; } - + // This artist has no library albums and no active/kept discovery albums // They should be removed from Lidarr - console.log(`[CLEANUP] Removing discovery-only artist: ${artistName}`); - + logger.debug( + `[CLEANUP] Removing discovery-only artist: ${artistName}` + ); + await axios.delete( `${settings.lidarrUrl}/api/v1/artist/${lidarrArtist.id}`, { @@ -1650,22 +1842,21 @@ router.post("/cleanup-lidarr", async (req, res) => { timeout: 30000, } ); - + artistsRemoved.push(artistName); - console.log(`[CLEANUP] ✓ Removed: ${artistName}`); - + logger.debug(`[CLEANUP] Removed: ${artistName}`); } catch (error: any) { const msg = `Failed to process ${artistName}: ${error.message}`; errors.push(msg); - console.error(`[CLEANUP] ✗ ${msg}`); + logger.error(`[CLEANUP] ${msg}`); } } - - console.log(`\n[CLEANUP] Complete:`); - console.log(` - Removed: ${artistsRemoved.length}`); - console.log(` - Kept: ${artistsKept.length}`); - console.log(` - Errors: ${errors.length}`); - + + logger.debug(`\n[CLEANUP] Complete:`); + logger.debug(` - Removed: ${artistsRemoved.length}`); + logger.debug(` - Kept: ${artistsKept.length}`); + logger.debug(` - Errors: ${errors.length}`); + res.json({ success: true, removed: artistsRemoved, @@ -1678,8 +1869,11 @@ router.post("/cleanup-lidarr", async (req, res) => { }, }); } catch (error: any) { - console.error("[CLEANUP] Lidarr cleanup error:", error?.message || error); - res.status(500).json({ + logger.error( + "[CLEANUP] Lidarr cleanup error:", + error?.message || error + ); + res.status(500).json({ error: "Failed to cleanup Lidarr", 
details: error?.message || "Unknown error", }); @@ -1691,23 +1885,25 @@ router.post("/cleanup-lidarr", async (req, res) => { // IMPORTANT: Does NOT touch albums that user has LIKED (discovery_liked) or native library router.post("/fix-tagging", async (req, res) => { try { - console.log("\n[FIX-TAGGING] Starting album tagging repair..."); - + logger.debug("\n[FIX-TAGGING] Starting album tagging repair..."); + // Get all discovery artists (from DiscoveryAlbum records) const discoveryArtists = await prisma.discoveryAlbum.findMany({ - distinct: ['artistMbid'], + distinct: ["artistMbid"], select: { artistMbid: true, artistName: true }, }); - - console.log(`[FIX-TAGGING] Found ${discoveryArtists.length} artists with discovery records`); - + + logger.debug( + `[FIX-TAGGING] Found ${discoveryArtists.length} artists with discovery records` + ); + let albumsFixed = 0; let ownedRecordsRemoved = 0; const fixedArtists: string[] = []; - + for (const da of discoveryArtists) { if (!da.artistMbid) continue; - + // Check if artist has ANY protected content: // 1. native_scan = real user library from before discovery // 2. discovery_liked = user liked a discovery album (should be kept!) 
@@ -1717,13 +1913,15 @@ router.post("/fix-tagging", async (req, res) => { source: { in: ["native_scan", "discovery_liked"] }, }, }); - + if (hasProtectedContent) { // Artist has protected content - don't touch their albums - console.log(`[FIX-TAGGING] Skipping ${da.artistName} - has protected content (${hasProtectedContent.source})`); + logger.debug( + `[FIX-TAGGING] Skipping ${da.artistName} - has protected content (${hasProtectedContent.source})` + ); continue; } - + // Also check if artist has any LIKED discovery albums (double-check) const hasLikedDiscovery = await prisma.discoveryAlbum.findFirst({ where: { @@ -1731,13 +1929,15 @@ router.post("/fix-tagging", async (req, res) => { status: { in: ["LIKED", "MOVED"] }, }, }); - + if (hasLikedDiscovery) { // User liked albums from this artist - don't touch - console.log(`[FIX-TAGGING] Skipping ${da.artistName} - has LIKED discovery albums`); + logger.debug( + `[FIX-TAGGING] Skipping ${da.artistName} - has LIKED discovery albums` + ); continue; } - + // This artist has NO protected content - they're purely an ACTIVE discovery artist // Fix any of their albums that are incorrectly tagged as LIBRARY const mistaggedAlbums = await prisma.album.findMany({ @@ -1746,7 +1946,7 @@ router.post("/fix-tagging", async (req, res) => { location: "LIBRARY", }, }); - + if (mistaggedAlbums.length > 0) { // Update all these albums to DISCOVER const updated = await prisma.album.updateMany({ @@ -1756,7 +1956,7 @@ router.post("/fix-tagging", async (req, res) => { }, data: { location: "DISCOVER" }, }); - + // Remove incorrect OwnedAlbum records (but not protected ones) const removed = await prisma.ownedAlbum.deleteMany({ where: { @@ -1764,17 +1964,21 @@ router.post("/fix-tagging", async (req, res) => { source: { notIn: ["native_scan", "discovery_liked"] }, }, }); - + albumsFixed += updated.count; ownedRecordsRemoved += removed.count; fixedArtists.push(da.artistName); - - console.log(`[FIX-TAGGING] Fixed ${updated.count} albums for 
${da.artistName}`); + + logger.debug( + `[FIX-TAGGING] Fixed ${updated.count} albums for ${da.artistName}` + ); } } - - console.log(`[FIX-TAGGING] Complete: ${albumsFixed} albums fixed, ${ownedRecordsRemoved} OwnedAlbum records removed`); - + + logger.debug( + `[FIX-TAGGING] Complete: ${albumsFixed} albums fixed, ${ownedRecordsRemoved} OwnedAlbum records removed` + ); + res.json({ success: true, albumsFixed, @@ -1782,8 +1986,8 @@ router.post("/fix-tagging", async (req, res) => { fixedArtists, }); } catch (error: any) { - console.error("[FIX-TAGGING] Error:", error?.message || error); - res.status(500).json({ + logger.error("[FIX-TAGGING] Error:", error?.message || error); + res.status(500).json({ error: "Failed to fix album tagging", details: error?.message || "Unknown error", }); diff --git a/backend/src/routes/downloads.ts b/backend/src/routes/downloads.ts index 899aaca..2454268 100644 --- a/backend/src/routes/downloads.ts +++ b/backend/src/routes/downloads.ts @@ -1,9 +1,11 @@ import { Router } from "express"; +import { logger } from "../utils/logger"; import { requireAuthOrToken } from "../middleware/auth"; import { prisma } from "../utils/db"; import { config } from "../config"; import { lidarrService } from "../services/lidarr"; import { musicBrainzService } from "../services/musicbrainz"; +import { lastFmService } from "../services/lastfm"; import { simpleDownloadManager } from "../services/simpleDownloadManager"; import crypto from "crypto"; @@ -11,6 +13,78 @@ const router = Router(); router.use(requireAuthOrToken); +/** + * Verify and potentially correct artist name before download + * Uses multiple sources for canonical name resolution: + * 1. MusicBrainz (if MBID provided) - most authoritative + * 2. LastFM correction API - handles aliases and misspellings + * 3. 
Original name - fallback + * + * @returns Object with verified name and whether correction was applied + */ +async function verifyArtistName( + artistName: string, + artistMbid?: string +): Promise<{ + verifiedName: string; + wasCorrected: boolean; + source: "musicbrainz" | "lastfm" | "original"; + originalName: string; +}> { + const originalName = artistName; + + // Strategy 1: If we have MBID, use MusicBrainz as authoritative source + if (artistMbid) { + try { + const mbArtist = await musicBrainzService.getArtist(artistMbid); + if (mbArtist?.name) { + return { + verifiedName: mbArtist.name, + wasCorrected: + mbArtist.name.toLowerCase() !== + artistName.toLowerCase(), + source: "musicbrainz", + originalName, + }; + } + } catch (error) { + logger.warn( + `MusicBrainz lookup failed for MBID ${artistMbid}:`, + error + ); + } + } + + // Strategy 2: Use LastFM correction API + try { + const correction = await lastFmService.getArtistCorrection(artistName); + if (correction?.corrected) { + logger.debug( + `[VERIFY] LastFM correction: "${artistName}" → "${correction.canonicalName}"` + ); + return { + verifiedName: correction.canonicalName, + wasCorrected: true, + source: "lastfm", + originalName, + }; + } + } catch (error) { + logger.warn( + `LastFM correction lookup failed for "${artistName}":`, + error + ); + } + + // Strategy 3: Return original name + return { + verifiedName: artistName, + wasCorrected: false, + source: "original", + originalName, + }; +} + // POST /downloads - Create download job router.post("/", async (req, res) => { try { @@ -75,6 +149,18 @@ router.post("/", async (req, res) => { }); } + // Single album download - verify artist name before proceeding + let verifiedArtistName = artistName; + if (type === "album" && artistName) { + const verification = await verifyArtistName(artistName, mbid); + if (verification.wasCorrected) { + logger.debug( + `[DOWNLOAD] Artist name verified: "${artistName}" → "${verification.verifiedName}" (source: 
${verification.source})` + ); + verifiedArtistName = verification.verifiedName; + } + } + // Single album download - check for existing job first const existingJob = await prisma.downloadJob.findFirst({ where: { @@ -84,7 +170,9 @@ router.post("/", async (req, res) => { }); if (existingJob) { - console.log(`[DOWNLOAD] Job already exists for ${mbid}: ${existingJob.id} (${existingJob.status})`); + logger.debug( + `[DOWNLOAD] Job already exists for ${mbid}: ${existingJob.id} (${existingJob.status})` + ); return res.json({ id: existingJob.id, status: existingJob.status, @@ -105,13 +193,13 @@ router.post("/", async (req, res) => { metadata: { downloadType, rootFolderPath, - artistName, + artistName: verifiedArtistName, albumTitle, }, }, }); - console.log( + logger.debug( `[DOWNLOAD] Triggering Lidarr: ${type} "${subject}" -> ${rootFolderPath}` ); @@ -122,10 +210,10 @@ router.post("/", async (req, res) => { mbid, subject, rootFolderPath, - artistName, + verifiedArtistName, albumTitle ).catch((error) => { - console.error( + logger.error( `Download processing failed for job ${job.id}:`, error ); @@ -139,7 +227,7 @@ router.post("/", async (req, res) => { message: "Download job created. 
Processing in background.", }); } catch (error) { - console.error("Create download job error:", error); + logger.error("Create download job error:", error); res.status(500).json({ error: "Failed to create download job" }); } }); @@ -154,27 +242,66 @@ async function processArtistDownload( rootFolderPath: string, downloadType: string ): Promise<{ id: string; subject: string }[]> { - console.log(`\n Processing artist download: ${artistName}`); - console.log(` Artist MBID: ${artistMbid}`); + logger.debug(`\n Processing artist download: ${artistName}`); + logger.debug(` Artist MBID: ${artistMbid}`); // Generate a batch ID to group all album downloads const batchId = crypto.randomUUID(); - console.log(` Batch ID: ${batchId}`); + logger.debug(` Batch ID: ${batchId}`); + + // CRITICAL FIX: Resolve canonical artist name from MusicBrainz + // Last.fm may return aliases (e.g., "blink" for "blink-182") + // Lidarr needs the official name to find the correct artist + let canonicalArtistName = artistName; + try { + logger.debug(` Resolving canonical artist name from MusicBrainz...`); + const mbArtist = await musicBrainzService.getArtist(artistMbid); + if (mbArtist && mbArtist.name) { + canonicalArtistName = mbArtist.name; + if (canonicalArtistName !== artistName) { + logger.debug( + ` ✓ Canonical name resolved: "${artistName}" → "${canonicalArtistName}"` + ); + } else { + logger.debug( + ` ✓ Name matches canonical: "${canonicalArtistName}"` + ); + } + } + } catch (mbError: any) { + logger.warn(` ⚠ MusicBrainz lookup failed: ${mbError.message}`); + // Fallback to LastFM correction + try { + const correction = await lastFmService.getArtistCorrection( + artistName + ); + if (correction?.canonicalName) { + canonicalArtistName = correction.canonicalName; + logger.debug( + ` ✓ Name resolved via LastFM: "${artistName}" → "${canonicalArtistName}"` + ); + } + } catch (lfmError) { + logger.warn( + ` ⚠ LastFM correction also failed, using original name` + ); + } + } try { // First, add the 
artist to Lidarr (this monitors all albums) const lidarrArtist = await lidarrService.addArtist( artistMbid, - artistName, + canonicalArtistName, rootFolderPath ); if (!lidarrArtist) { - console.log(` Failed to add artist to Lidarr`); + logger.debug(` Failed to add artist to Lidarr`); throw new Error("Failed to add artist to Lidarr"); } - console.log(` Artist added to Lidarr (ID: ${lidarrArtist.id})`); + logger.debug(` Artist added to Lidarr (ID: ${lidarrArtist.id})`); // Fetch albums from MusicBrainz const releaseGroups = await musicBrainzService.getReleaseGroups( @@ -183,12 +310,12 @@ async function processArtistDownload( 100 ); - console.log( + logger.debug( ` Found ${releaseGroups.length} albums/EPs from MusicBrainz` ); if (releaseGroups.length === 0) { - console.log(` No albums found for artist`); + logger.debug(` No albums found for artist`); return []; } @@ -206,49 +333,84 @@ async function processArtistDownload( }); if (existingAlbum) { - console.log(` Skipping "${albumTitle}" - already in library`); + logger.debug(` Skipping "${albumTitle}" - already in library`); continue; } - // Check if there's already a pending/processing job for this album - const existingJob = await prisma.downloadJob.findFirst({ - where: { - targetMbid: albumMbid, - status: { in: ["pending", "processing"] }, - }, + // Use transaction to prevent race conditions when creating jobs + const jobResult = await prisma.$transaction(async (tx) => { + // Check for existing active job + const existingJob = await tx.downloadJob.findFirst({ + where: { + targetMbid: albumMbid, + status: { in: ["pending", "processing"] }, + }, + }); + + if (existingJob) { + return { + skipped: true, + job: existingJob, + reason: "already_queued", + }; + } + + // Also check for recently failed job (within last 30 seconds) to prevent spam retries + const recentFailed = await tx.downloadJob.findFirst({ + where: { + targetMbid: albumMbid, + status: "failed", + completedAt: { gte: new Date(Date.now() - 30000) }, + }, + 
}); + + if (recentFailed) { + return { + skipped: true, + job: recentFailed, + reason: "recently_failed", + }; + } + + // Create new job inside transaction + const now = new Date(); + const job = await tx.downloadJob.create({ + data: { + userId, + subject: albumSubject, + type: "album", + targetMbid: albumMbid, + status: "pending", + metadata: { + downloadType, + rootFolderPath, + artistName, + artistMbid, + albumTitle, + batchId, // Link all albums in this artist download + batchArtist: artistName, + createdAt: now.toISOString(), // Track when job was created for timeout + }, + }, + }); + + return { skipped: false, job }; }); - if (existingJob) { - console.log( - ` Skipping "${albumTitle}" - already in download queue` + if (jobResult.skipped) { + logger.debug( + ` Skipping "${albumTitle}" - ${ + jobResult.reason === "recently_failed" + ? "recently failed" + : "already in download queue" + }` ); continue; } - // Create download job for this album - const now = new Date(); - const job = await prisma.downloadJob.create({ - data: { - userId, - subject: albumSubject, - type: "album", - targetMbid: albumMbid, - status: "pending", - metadata: { - downloadType, - rootFolderPath, - artistName, - artistMbid, - albumTitle, - batchId, // Link all albums in this artist download - batchArtist: artistName, - createdAt: now.toISOString(), // Track when job was created for timeout - }, - }, - }); - + const job = jobResult.job; jobs.push({ id: job.id, subject: albumSubject }); - console.log(` [JOB] Created job for: ${albumSubject}`); + logger.debug(` [JOB] Created job for: ${albumSubject}`); // Start the download in background processDownload( @@ -260,14 +422,14 @@ async function processArtistDownload( artistName, albumTitle ).catch((error) => { - console.error(`Download failed for ${albumSubject}:`, error); + logger.error(`Download failed for ${albumSubject}:`, error); }); } - console.log(` Created ${jobs.length} album download jobs`); + logger.debug(` Created ${jobs.length} album 
download jobs`); return jobs; } catch (error: any) { - console.error(` Failed to process artist download:`, error.message); + logger.error(` Failed to process artist download:`, error.message); throw error; } } @@ -284,7 +446,7 @@ async function processDownload( ) { const job = await prisma.downloadJob.findUnique({ where: { id: jobId } }); if (!job) { - console.error(`Job ${jobId} not found`); + logger.error(`Job ${jobId} not found`); return; } @@ -304,7 +466,7 @@ async function processDownload( } } - console.log(`Parsed: Artist="${parsedArtist}", Album="${parsedAlbum}"`); + logger.debug(`Parsed: Artist="${parsedArtist}", Album="${parsedAlbum}"`); // Use simple download manager for album downloads const result = await simpleDownloadManager.startDownload( @@ -316,7 +478,7 @@ async function processDownload( ); if (!result.success) { - console.error(`Failed to start download: ${result.error}`); + logger.error(`Failed to start download: ${result.error}`); } } } @@ -335,12 +497,12 @@ router.delete("/clear-all", async (req, res) => { const result = await prisma.downloadJob.deleteMany({ where }); - console.log( + logger.debug( ` Cleared ${result.count} download jobs for user ${userId}` ); res.json({ success: true, deleted: result.count }); } catch (error) { - console.error("Clear downloads error:", error); + logger.error("Clear downloads error:", error); res.status(500).json({ error: "Failed to clear downloads" }); } }); @@ -355,7 +517,7 @@ router.post("/clear-lidarr-queue", async (req, res) => { errors: result.errors, }); } catch (error: any) { - console.error("Clear Lidarr queue error:", error); + logger.error("Clear Lidarr queue error:", error); res.status(500).json({ error: "Failed to clear Lidarr queue" }); } }); @@ -373,7 +535,7 @@ router.get("/failed", async (req, res) => { res.json(failedAlbums); } catch (error) { - console.error("List failed albums error:", error); + logger.error("List failed albums error:", error); res.status(500).json({ error: "Failed to list 
failed albums" }); } }); @@ -399,7 +561,7 @@ router.delete("/failed/:id", async (req, res) => { res.json({ success: true }); } catch (error) { - console.error("Delete failed album error:", error); + logger.error("Delete failed album error:", error); res.status(500).json({ error: "Failed to delete failed album" }); } }); @@ -423,7 +585,7 @@ router.get("/:id", async (req, res) => { res.json(job); } catch (error) { - console.error("Get download job error:", error); + logger.error("Get download job error:", error); res.status(500).json({ error: "Failed to get download job" }); } }); @@ -456,7 +618,7 @@ router.patch("/:id", async (req, res) => { res.json(updated); } catch (error) { - console.error("Update download job error:", error); + logger.error("Update download job error:", error); res.status(500).json({ error: "Failed to update download job" }); } }); @@ -479,8 +641,8 @@ router.delete("/:id", async (req, res) => { // Return success even if nothing was deleted (idempotent delete) res.json({ success: true, deleted: result.count > 0 }); } catch (error: any) { - console.error("Delete download job error:", error); - console.error("Error details:", error.message, error.stack); + logger.error("Delete download job error:", error); + logger.error("Error details:", error.message, error.stack); res.status(500).json({ error: "Failed to delete download job", details: error.message, @@ -492,7 +654,12 @@ router.delete("/:id", async (req, res) => { router.get("/", async (req, res) => { try { const userId = req.user!.id; - const { status, limit = "50", includeDiscovery = "false", includeCleared = "false" } = req.query; + const { + status, + limit = "50", + includeDiscovery = "false", + includeCleared = "false", + } = req.query; const where: any = { userId }; if (status) { @@ -521,7 +688,7 @@ router.get("/", async (req, res) => { res.json(filteredJobs); } catch (error) { - console.error("List download jobs error:", error); + logger.error("List download jobs error:", error); 
res.status(500).json({ error: "Failed to list download jobs" }); } }); @@ -580,7 +747,7 @@ router.post("/keep-track", async (req, res) => { "Track marked as kept. Please add the full album manually to your /music folder.", }); } catch (error) { - console.error("Keep track error:", error); + logger.error("Keep track error:", error); res.status(500).json({ error: "Failed to keep track" }); } }); diff --git a/backend/src/routes/enrichment.ts b/backend/src/routes/enrichment.ts index 4ec57a7..ad42ad5 100644 --- a/backend/src/routes/enrichment.ts +++ b/backend/src/routes/enrichment.ts @@ -1,7 +1,19 @@ import { Router } from "express"; +import { logger } from "../utils/logger"; import { requireAuth, requireAdmin } from "../middleware/auth"; import { enrichmentService } from "../services/enrichment"; -import { getEnrichmentProgress, runFullEnrichment } from "../workers/unifiedEnrichment"; +import { + getEnrichmentProgress, + runFullEnrichment, +} from "../workers/unifiedEnrichment"; +import { enrichmentStateService } from "../services/enrichmentState"; +import { enrichmentFailureService } from "../services/enrichmentFailureService"; +import { + getSystemSettings, + invalidateSystemSettingsCache, +} from "../utils/systemSettings"; +import { rateLimiter } from "../services/rateLimiter"; +import { redisClient } from "../utils/redis"; const router = Router(); @@ -16,11 +28,82 @@ router.get("/progress", async (req, res) => { const progress = await getEnrichmentProgress(); res.json(progress); } catch (error) { - console.error("Get enrichment progress error:", error); + logger.error("Get enrichment progress error:", error); res.status(500).json({ error: "Failed to get progress" }); } }); +/** + * GET /enrichment/status + * Get detailed enrichment state (running, paused, etc.) 
+ */ +router.get("/status", async (req, res) => { + try { + const state = await enrichmentStateService.getState(); + res.json(state || { status: "idle", currentPhase: null }); + } catch (error) { + logger.error("Get enrichment status error:", error); + res.status(500).json({ error: "Failed to get status" }); + } +}); + +/** + * POST /enrichment/pause + * Pause the enrichment process + */ +router.post("/pause", requireAdmin, async (req, res) => { + try { + const state = await enrichmentStateService.pause(); + res.json({ + message: "Enrichment paused", + state, + }); + } catch (error: any) { + logger.error("Pause enrichment error:", error); + res.status(400).json({ + error: error.message || "Failed to pause enrichment", + }); + } +}); + +/** + * POST /enrichment/resume + * Resume a paused enrichment process + */ +router.post("/resume", requireAdmin, async (req, res) => { + try { + const state = await enrichmentStateService.resume(); + res.json({ + message: "Enrichment resumed", + state, + }); + } catch (error: any) { + logger.error("Resume enrichment error:", error); + res.status(400).json({ + error: error.message || "Failed to resume enrichment", + }); + } +}); + +/** + * POST /enrichment/stop + * Stop the enrichment process + */ +router.post("/stop", requireAdmin, async (req, res) => { + try { + const state = await enrichmentStateService.stop(); + res.json({ + message: "Enrichment stopping...", + state, + }); + } catch (error: any) { + logger.error("Stop enrichment error:", error); + res.status(400).json({ + error: error.message || "Failed to stop enrichment", + }); + } +}); + /** * POST /enrichment/full * Trigger full enrichment (re-enriches everything regardless of status) @@ -29,20 +112,48 @@ router.get("/progress", async (req, res) => { router.post("/full", requireAdmin, async (req, res) => { try { // This runs in the background - runFullEnrichment().catch(err => { - console.error("Full enrichment error:", err); + runFullEnrichment().catch((err) => { + 
logger.error("Full enrichment error:", err); }); - - res.json({ + + res.json({ message: "Full enrichment started", - description: "All artists, track tags, and audio analysis will be re-processed" + description: + "All artists, track tags, and audio analysis will be re-processed", }); } catch (error) { - console.error("Trigger full enrichment error:", error); + logger.error("Trigger full enrichment error:", error); res.status(500).json({ error: "Failed to start full enrichment" }); } }); +/** + * POST /enrichment/sync + * Trigger incremental enrichment (only processes pending items) + * Fast sync that picks up new content without re-processing everything + */ +router.post("/sync", async (req, res) => { + try { + const { triggerEnrichmentNow } = await import( + "../workers/unifiedEnrichment" + ); + + // Trigger immediate enrichment cycle (incremental mode) + const result = await triggerEnrichmentNow(); + + res.json({ + message: "Incremental sync started", + description: "Processing new and pending items only", + result, + }); + } catch (error: any) { + logger.error("Trigger sync error:", error); + res.status(500).json({ + error: error.message || "Failed to start sync", + }); + } +}); + /** * GET /enrichment/settings * Get enrichment settings for current user @@ -53,7 +164,7 @@ router.get("/settings", async (req, res) => { const settings = await enrichmentService.getSettings(userId); res.json(settings); } catch (error) { - console.error("Get enrichment settings error:", error); + logger.error("Get enrichment settings error:", error); res.status(500).json({ error: "Failed to get settings" }); } }); @@ -65,10 +176,13 @@ router.get("/settings", async (req, res) => { router.put("/settings", async (req, res) => { try { const userId = req.user!.id; - const settings = await enrichmentService.updateSettings(userId, req.body); + const settings = await enrichmentService.updateSettings( + userId, + req.body + ); res.json(settings); } catch (error) { - console.error("Update 
enrichment settings error:", error); + logger.error("Update enrichment settings error:", error); res.status(500).json({ error: "Failed to update settings" }); } }); @@ -86,14 +200,20 @@ router.post("/artist/:id", async (req, res) => { return res.status(400).json({ error: "Enrichment is not enabled" }); } - const enrichmentData = await enrichmentService.enrichArtist(req.params.id, settings); + const enrichmentData = await enrichmentService.enrichArtist( + req.params.id, + settings + ); if (!enrichmentData) { return res.status(404).json({ error: "No enrichment data found" }); } if (enrichmentData.confidence > 0.3) { - await enrichmentService.applyArtistEnrichment(req.params.id, enrichmentData); + await enrichmentService.applyArtistEnrichment( + req.params.id, + enrichmentData + ); } res.json({ @@ -102,8 +222,10 @@ router.post("/artist/:id", async (req, res) => { data: enrichmentData, }); } catch (error: any) { - console.error("Enrich artist error:", error); - res.status(500).json({ error: error.message || "Failed to enrich artist" }); + logger.error("Enrich artist error:", error); + res.status(500).json({ + error: error.message || "Failed to enrich artist", + }); } }); @@ -120,14 +242,20 @@ router.post("/album/:id", async (req, res) => { return res.status(400).json({ error: "Enrichment is not enabled" }); } - const enrichmentData = await enrichmentService.enrichAlbum(req.params.id, settings); + const enrichmentData = await enrichmentService.enrichAlbum( + req.params.id, + settings + ); if (!enrichmentData) { return res.status(404).json({ error: "No enrichment data found" }); } if (enrichmentData.confidence > 0.3) { - await enrichmentService.applyAlbumEnrichment(req.params.id, enrichmentData); + await enrichmentService.applyAlbumEnrichment( + req.params.id, + enrichmentData + ); } res.json({ @@ -136,8 +264,10 @@ router.post("/album/:id", async (req, res) => { data: enrichmentData, }); } catch (error: any) { - console.error("Enrich album error:", error); - 
res.status(500).json({ error: error.message || "Failed to enrich album" }); + logger.error("Enrich album error:", error); + res.status(500).json({ + error: error.message || "Failed to enrich album", + }); } }); @@ -148,7 +278,9 @@ router.post("/album/:id", async (req, res) => { router.post("/start", async (req, res) => { try { const userId = req.user!.id; - const { notificationService } = await import("../services/notificationService"); + const { notificationService } = await import( + "../services/notificationService" + ); // Check if enrichment is enabled in system settings const { prisma } = await import("../utils/db"); @@ -158,7 +290,9 @@ router.post("/start", async (req, res) => { }); if (!systemSettings?.autoEnrichMetadata) { - return res.status(400).json({ error: "Enrichment is not enabled. Enable it in settings first." }); + return res.status(400).json({ + error: "Enrichment is not enabled. Enable it in settings first.", + }); } // Get user enrichment settings or use defaults @@ -175,50 +309,282 @@ router.post("/start", async (req, res) => { ); // Start enrichment in background - enrichmentService.enrichLibrary(userId).then(async () => { - // Send notification when complete - await notificationService.notifySystem( - userId, - "Library Enrichment Complete", - "All artist metadata has been enriched" - ); - }).catch(async (error) => { - console.error("Background enrichment failed:", error); - await notificationService.create({ - userId, - type: "error", - title: "Enrichment Failed", - message: error.message || "Failed to enrich library metadata", + enrichmentService + .enrichLibrary(userId) + .then(async () => { + // Send notification when complete + await notificationService.notifySystem( + userId, + "Library Enrichment Complete", + "All artist metadata has been enriched" + ); + }) + .catch(async (error) => { + logger.error("Background enrichment failed:", error); + await notificationService.create({ + userId, + type: "error", + title: "Enrichment Failed", + 
message: + error.message || "Failed to enrich library metadata", + }); }); - }); res.json({ success: true, message: "Library enrichment started in background", }); } catch (error: any) { - console.error("Start enrichment error:", error); - res.status(500).json({ error: error.message || "Failed to start enrichment" }); + logger.error("Start enrichment error:", error); + res.status(500).json({ + error: error.message || "Failed to start enrichment", + }); } }); /** - * PUT /library/artists/:id/metadata - * Update artist metadata manually + * GET /enrichment/failures + * Get all enrichment failures with filtering + */ +router.get("/failures", async (req, res) => { + try { + const { entityType, includeSkipped, includeResolved, limit, offset } = + req.query; + + const options: any = {}; + if (entityType) options.entityType = entityType as string; + if (includeSkipped === "true") options.includeSkipped = true; + if (includeResolved === "true") options.includeResolved = true; + if (limit) options.limit = parseInt(limit as string); + if (offset) options.offset = parseInt(offset as string); + + const result = await enrichmentFailureService.getFailures(options); + res.json(result); + } catch (error) { + logger.error("Get failures error:", error); + res.status(500).json({ error: "Failed to get failures" }); + } +}); + +/** + * GET /enrichment/failures/counts + * Get failure counts by type + */ +router.get("/failures/counts", async (req, res) => { + try { + const counts = await enrichmentFailureService.getFailureCounts(); + res.json(counts); + } catch (error) { + logger.error("Get failure counts error:", error); + res.status(500).json({ error: "Failed to get failure counts" }); + } +}); + +/** + * POST /enrichment/retry + * Retry specific failed items + */ +router.post("/retry", requireAdmin, async (req, res) => { + try { + const { ids } = req.body; + + if (!ids || !Array.isArray(ids) || ids.length === 0) { + return res + .status(400) + .json({ error: "Must provide array of 
failure IDs" }); + } + + // Reset retry count for these failures + await enrichmentFailureService.resetRetryCount(ids); + + // Get the failures to determine what to retry + const failures = await Promise.all( + ids.map((id) => enrichmentFailureService.getFailure(id)) + ); + + // Group by type and trigger appropriate re-enrichment + const { prisma } = await import("../utils/db"); + let queued = 0; + let skipped = 0; + + for (const failure of failures) { + if (!failure) continue; + + try { + if (failure.entityType === "artist") { + // Check if artist still exists + const artist = await prisma.artist.findUnique({ + where: { id: failure.entityId }, + select: { id: true }, + }); + + if (!artist) { + // Entity was deleted - mark failure as resolved + await enrichmentFailureService.resolveFailures([ + failure.id, + ]); + skipped++; + continue; + } + + // Reset artist enrichment status + await prisma.artist.update({ + where: { id: failure.entityId }, + data: { enrichmentStatus: "pending" }, + }); + queued++; + } else if (failure.entityType === "track") { + // Check if track still exists + const track = await prisma.track.findUnique({ + where: { id: failure.entityId }, + select: { id: true }, + }); + + if (!track) { + // Entity was deleted - mark failure as resolved + await enrichmentFailureService.resolveFailures([ + failure.id, + ]); + skipped++; + continue; + } + + // Reset track tag status + await prisma.track.update({ + where: { id: failure.entityId }, + data: { lastfmTags: [] }, + }); + queued++; + } else if (failure.entityType === "audio") { + // Check if track still exists + const track = await prisma.track.findUnique({ + where: { id: failure.entityId }, + select: { id: true }, + }); + + if (!track) { + // Entity was deleted - mark failure as resolved + await enrichmentFailureService.resolveFailures([ + failure.id, + ]); + skipped++; + continue; + } + + // Reset audio analysis status + await prisma.track.update({ + where: { id: failure.entityId }, + data: { + 
analysisStatus: "pending", + analysisRetryCount: 0, + }, + }); + queued++; + } + } catch (error) { + logger.error( + `Failed to reset ${failure.entityType} ${failure.entityId}:`, + error + ); + // Don't re-throw - continue processing other failures + } + } + + res.json({ + message: `Queued ${queued} items for retry, ${skipped} skipped (entities no longer exist)`, + queued, + skipped, + }); + } catch (error: any) { + logger.error("Retry failures error:", error); + res.status(500).json({ + error: error.message || "Failed to retry failures", + }); + } +}); + +/** + * POST /enrichment/skip + * Skip specific failures (won't retry automatically) + */ +router.post("/skip", requireAdmin, async (req, res) => { + try { + const { ids } = req.body; + + if (!ids || !Array.isArray(ids) || ids.length === 0) { + return res + .status(400) + .json({ error: "Must provide array of failure IDs" }); + } + + const count = await enrichmentFailureService.skipFailures(ids); + res.json({ + message: `Skipped ${count} failures`, + count, + }); + } catch (error: any) { + logger.error("Skip failures error:", error); + res.status(500).json({ + error: error.message || "Failed to skip failures", + }); + } +}); + +/** + * DELETE /enrichment/failures/:id + * Delete a specific failure record + */ +router.delete("/failures/:id", requireAdmin, async (req, res) => { + try { + const count = await enrichmentFailureService.deleteFailures([ + req.params.id, + ]); + res.json({ + message: "Failure deleted", + count, + }); + } catch (error: any) { + logger.error("Delete failure error:", error); + res.status(500).json({ + error: error.message || "Failed to delete failure", + }); + } +}); + +/** + * PUT /enrichment/artists/:id/metadata + * Update artist metadata manually (non-destructive overrides) + * User edits are stored as overrides; canonical data preserved for API lookups */ router.put("/artists/:id/metadata", async (req, res) => { try { - const { name, bio, genres, mbid, heroUrl } = req.body; + const { 
name, bio, genres, heroUrl } = req.body; const updateData: any = {}; - if (name) updateData.name = name; - if (bio) updateData.summary = bio; - if (mbid) updateData.mbid = mbid; - if (heroUrl) updateData.heroUrl = heroUrl; - if (genres) updateData.manualGenres = JSON.stringify(genres); + let hasOverrides = false; - // Mark as manually edited - updateData.manuallyEdited = true; + // Map user edits to override fields (non-destructive) + if (name !== undefined) { + updateData.displayName = name; + hasOverrides = true; + } + if (bio !== undefined) { + updateData.userSummary = bio; + hasOverrides = true; + } + if (heroUrl !== undefined) { + updateData.userHeroUrl = heroUrl; + hasOverrides = true; + } + if (genres !== undefined) { + updateData.userGenres = genres; + hasOverrides = true; + } + + // Set override flag + if (hasOverrides) { + updateData.hasUserOverrides = true; + } const { prisma } = await import("../utils/db"); const artist = await prisma.artist.update({ @@ -236,30 +602,56 @@ router.put("/artists/:id/metadata", async (req, res) => { }, }); + // Invalidate Redis cache for artist hero image + try { + await redisClient.del(`hero:${req.params.id}`); + } catch (err) { + logger.warn("Failed to invalidate Redis cache:", err); + } + res.json(artist); } catch (error: any) { - console.error("Update artist metadata error:", error); - res.status(500).json({ error: error.message || "Failed to update artist" }); + logger.error("Update artist metadata error:", error); + res.status(500).json({ + error: error.message || "Failed to update artist", + }); } }); /** - * PUT /library/albums/:id/metadata - * Update album metadata manually + * PUT /enrichment/albums/:id/metadata + * Update album metadata manually (non-destructive overrides) + * User edits are stored as overrides; canonical data preserved for API lookups */ router.put("/albums/:id/metadata", async (req, res) => { try { - const { title, year, genres, rgMbid, coverUrl } = req.body; + const { title, year, genres, 
coverUrl } = req.body; const updateData: any = {}; - if (title) updateData.title = title; - if (year) updateData.year = parseInt(year); - if (rgMbid) updateData.rgMbid = rgMbid; - if (coverUrl) updateData.coverUrl = coverUrl; - if (genres) updateData.manualGenres = JSON.stringify(genres); + let hasOverrides = false; - // Mark as manually edited - updateData.manuallyEdited = true; + // Map user edits to override fields (non-destructive) + if (title !== undefined) { + updateData.displayTitle = title; + hasOverrides = true; + } + if (year !== undefined) { + updateData.displayYear = parseInt(year); + hasOverrides = true; + } + if (coverUrl !== undefined) { + updateData.userCoverUrl = coverUrl; + hasOverrides = true; + } + if (genres !== undefined) { + updateData.userGenres = genres; + hasOverrides = true; + } + + // Set override flag + if (hasOverrides) { + updateData.hasUserOverrides = true; + } const { prisma } = await import("../utils/db"); const album = await prisma.album.update({ @@ -285,8 +677,348 @@ router.put("/albums/:id/metadata", async (req, res) => { res.json(album); } catch (error: any) { - console.error("Update album metadata error:", error); - res.status(500).json({ error: error.message || "Failed to update album" }); + logger.error("Update album metadata error:", error); + res.status(500).json({ + error: error.message || "Failed to update album", + }); + } +}); + +/** + * PUT /enrichment/tracks/:id/metadata + * Update track metadata manually (non-destructive overrides) + * User edits are stored as overrides; canonical data preserved + */ +router.put("/tracks/:id/metadata", async (req, res) => { + try { + const { title, trackNo } = req.body; + + const updateData: any = {}; + let hasOverrides = false; + + // Map user edits to override fields (non-destructive) + if (title !== undefined) { + updateData.displayTitle = title; + hasOverrides = true; + } + if (trackNo !== undefined) { + updateData.displayTrackNo = parseInt(trackNo); + hasOverrides = true; + } + 
+ // Set override flag + if (hasOverrides) { + updateData.hasUserOverrides = true; + } + + const { prisma } = await import("../utils/db"); + const track = await prisma.track.update({ + where: { id: req.params.id }, + data: updateData, + include: { + album: { + select: { + id: true, + title: true, + artist: { + select: { + id: true, + name: true, + }, + }, + }, + }, + }, + }); + + res.json(track); + } catch (error: any) { + logger.error("Update track metadata error:", error); + res.status(500).json({ + error: error.message || "Failed to update track", + }); + } +}); + +/** + * POST /enrichment/artists/:id/reset + * Reset artist metadata to canonical values (clear all user overrides) + */ +router.post("/artists/:id/reset", async (req, res) => { + try { + const { prisma } = await import("../utils/db"); + + // Check if artist exists first + const existingArtist = await prisma.artist.findUnique({ + where: { id: req.params.id }, + select: { id: true }, + }); + + if (!existingArtist) { + return res.status(404).json({ + error: "Artist not found", + message: "The artist may have been deleted", + }); + } + + const artist = await prisma.artist.update({ + where: { id: req.params.id }, + data: { + displayName: null, + userSummary: null, + userHeroUrl: null, + userGenres: [], + hasUserOverrides: false, + }, + include: { + albums: { + select: { + id: true, + title: true, + year: true, + coverUrl: true, + }, + }, + }, + }); + + // Invalidate Redis cache for artist hero image + try { + await redisClient.del(`hero:${req.params.id}`); + } catch (err) { + logger.warn("Failed to invalidate Redis cache:", err); + } + + res.json({ + message: "Artist metadata reset to original values", + artist, + }); + } catch (error: any) { + // Handle P2025 specifically in case of race condition + if (error.code === "P2025") { + return res.status(404).json({ + error: "Artist not found", + message: "The artist may have been deleted", + }); + } + logger.error("Reset artist metadata error:", error); + 
res.status(500).json({ + error: error.message || "Failed to reset artist metadata", + }); + } +}); + +/** + * POST /enrichment/albums/:id/reset + * Reset album metadata to canonical values (clear all user overrides) + */ +router.post("/albums/:id/reset", async (req, res) => { + try { + const { prisma } = await import("../utils/db"); + + // Check if album exists first + const existingAlbum = await prisma.album.findUnique({ + where: { id: req.params.id }, + select: { id: true }, + }); + + if (!existingAlbum) { + return res.status(404).json({ + error: "Album not found", + message: "The album may have been deleted", + }); + } + + const album = await prisma.album.update({ + where: { id: req.params.id }, + data: { + displayTitle: null, + displayYear: null, + userCoverUrl: null, + userGenres: [], + hasUserOverrides: false, + }, + include: { + artist: { + select: { + id: true, + name: true, + }, + }, + tracks: { + select: { + id: true, + title: true, + trackNo: true, + duration: true, + }, + }, + }, + }); + + res.json({ + message: "Album metadata reset to original values", + album, + }); + } catch (error: any) { + // Handle P2025 specifically in case of race condition + if (error.code === "P2025") { + return res.status(404).json({ + error: "Album not found", + message: "The album may have been deleted", + }); + } + logger.error("Reset album metadata error:", error); + res.status(500).json({ + error: error.message || "Failed to reset album metadata", + }); + } +}); + +/** + * POST /enrichment/tracks/:id/reset + * Reset track metadata to canonical values (clear all user overrides) + */ +router.post("/tracks/:id/reset", async (req, res) => { + try { + const { prisma } = await import("../utils/db"); + + // Check if track exists first + const existingTrack = await prisma.track.findUnique({ + where: { id: req.params.id }, + select: { id: true }, + }); + + if (!existingTrack) { + return res.status(404).json({ + error: "Track not found", + message: "The track may have been 
deleted", + }); + } + + const track = await prisma.track.update({ + where: { id: req.params.id }, + data: { + displayTitle: null, + displayTrackNo: null, + hasUserOverrides: false, + }, + include: { + album: { + select: { + id: true, + title: true, + artist: { + select: { + id: true, + name: true, + }, + }, + }, + }, + }, + }); + + res.json({ + message: "Track metadata reset to original values", + track, + }); + } catch (error: any) { + // Handle P2025 specifically in case of race condition + if (error.code === "P2025") { + return res.status(404).json({ + error: "Track not found", + message: "The track may have been deleted", + }); + } + logger.error("Reset track metadata error:", error); + res.status(500).json({ + error: error.message || "Failed to reset track metadata", + }); + } +}); + +/** + * GET /enrichment/concurrency + * Get current enrichment concurrency configuration + */ +router.get("/concurrency", async (req, res) => { + try { + const settings = await getSystemSettings(); + const concurrency = settings?.enrichmentConcurrency || 1; + + // Calculate estimated speeds based on concurrency + const artistsPerMin = Math.round(10 * concurrency); + const tracksPerMin = Math.round(60 * concurrency); + + res.json({ + concurrency, + estimatedSpeed: `~${artistsPerMin} artists/min, ~${tracksPerMin} tracks/min`, + artistsPerMin, + tracksPerMin, + }); + } catch (error) { + logger.error("Failed to get enrichment settings:", error); + res.status(500).json({ error: "Failed to get enrichment settings" }); + } +}); + +/** + * PUT /enrichment/concurrency + * Update enrichment concurrency configuration + */ +router.put("/concurrency", requireAdmin, async (req, res) => { + try { + const { concurrency } = req.body; + + if (!concurrency || typeof concurrency !== "number") { + return res + .status(400) + .json({ error: "Missing or invalid 'concurrency' parameter" }); + } + + // Clamp concurrency to 1-5 + const clampedConcurrency = Math.max( + 1, + Math.min(5, 
Math.floor(concurrency)) + ); + + // Update system settings in database + const { prisma } = await import("../utils/db"); + await prisma.systemSettings.upsert({ + where: { id: "default" }, + create: { + id: "default", + enrichmentConcurrency: clampedConcurrency, + }, + update: { + enrichmentConcurrency: clampedConcurrency, + }, + }); + + // Invalidate cache so next read gets fresh value + invalidateSystemSettingsCache(); + + // Update rate limiter concurrency multiplier + rateLimiter.updateConcurrencyMultiplier(clampedConcurrency); + + // Calculate estimated speeds + const artistsPerMin = Math.round(10 * clampedConcurrency); + const tracksPerMin = Math.round(60 * clampedConcurrency); + + logger.debug( + `[Enrichment Settings] Updated concurrency to ${clampedConcurrency}` + ); + + res.json({ + concurrency: clampedConcurrency, + estimatedSpeed: `~${artistsPerMin} artists/min, ~${tracksPerMin} tracks/min`, + artistsPerMin, + tracksPerMin, + }); + } catch (error) { + logger.error("Failed to update enrichment settings:", error); + res.status(500).json({ error: "Failed to update enrichment settings" }); } }); diff --git a/backend/src/routes/homepage.ts b/backend/src/routes/homepage.ts index 3f0e8b2..3f8adc9 100644 --- a/backend/src/routes/homepage.ts +++ b/backend/src/routes/homepage.ts @@ -1,6 +1,7 @@ import { Router } from "express"; +import { logger } from "../utils/logger"; import { requireAuthOrToken } from "../middleware/auth"; -import { prisma } from "../utils/db"; +import { prisma, Prisma } from "../utils/db"; import { redisClient } from "../utils/redis"; const router = Router(); @@ -22,14 +23,14 @@ router.get("/genres", async (req, res) => { try { const cached = await redisClient.get(cacheKey); if (cached) { - console.log(`[HOMEPAGE] Cache HIT for genres`); + logger.debug(`[HOMEPAGE] Cache HIT for genres`); return res.json(JSON.parse(cached)); } } catch (cacheError) { - console.warn("[HOMEPAGE] Redis cache read error:", cacheError); + logger.warn("[HOMEPAGE] 
Redis cache read error:", cacheError); } - console.log( + logger.debug( `[HOMEPAGE] ✗ Cache MISS for genres, fetching from database...` ); @@ -37,7 +38,7 @@ router.get("/genres", async (req, res) => { const albums = await prisma.album.findMany({ where: { genres: { - isEmpty: false, // Only albums with genres + not: Prisma.JsonNull, // Only albums with genres (not null) }, location: "LIBRARY", // Exclude discovery albums }, @@ -60,8 +61,11 @@ router.get("/genres", async (req, res) => { // Count genre occurrences const genreCounts = new Map(); for (const album of albums) { - for (const genre of album.genres) { - genreCounts.set(genre, (genreCounts.get(genre) || 0) + 1); + const genres = album.genres as string[]; + if (genres && Array.isArray(genres)) { + for (const genre of genres) { + genreCounts.set(genre, (genreCounts.get(genre) || 0) + 1); + } } } @@ -71,12 +75,15 @@ router.get("/genres", async (req, res) => { .slice(0, limitNum) .map(([genre]) => genre); - console.log(`[HOMEPAGE] Top genres: ${topGenres.join(", ")}`); + logger.debug(`[HOMEPAGE] Top genres: ${topGenres.join(", ")}`); // For each top genre, get sample albums (up to 10) const genresWithAlbums = topGenres.map((genre) => { const genreAlbums = albums - .filter((a) => a.genres.includes(genre)) + .filter((a) => { + const genres = a.genres as string[]; + return genres && Array.isArray(genres) && genres.includes(genre); + }) .slice(0, 10) .map((a) => ({ id: a.id, @@ -103,14 +110,14 @@ router.get("/genres", async (req, res) => { 24 * 60 * 60, JSON.stringify(genresWithAlbums) ); - console.log(`[HOMEPAGE] Cached genres for 24 hours`); + logger.debug(`[HOMEPAGE] Cached genres for 24 hours`); } catch (cacheError) { - console.warn("[HOMEPAGE] Redis cache write error:", cacheError); + logger.warn("[HOMEPAGE] Redis cache write error:", cacheError); } res.json(genresWithAlbums); } catch (error) { - console.error("Get homepage genres error:", error); + logger.error("Get homepage genres error:", error); 
res.status(500).json({ error: "Failed to fetch genres" }); } }); @@ -129,14 +136,14 @@ router.get("/top-podcasts", async (req, res) => { try { const cached = await redisClient.get(cacheKey); if (cached) { - console.log(`[HOMEPAGE] Cache HIT for top podcasts`); + logger.debug(`[HOMEPAGE] Cache HIT for top podcasts`); return res.json(JSON.parse(cached)); } } catch (cacheError) { - console.warn("[HOMEPAGE] Redis cache read error:", cacheError); + logger.warn("[HOMEPAGE] Redis cache read error:", cacheError); } - console.log( + logger.debug( `[HOMEPAGE] ✗ Cache MISS for top podcasts, fetching from database...` ); @@ -172,14 +179,14 @@ router.get("/top-podcasts", async (req, res) => { 24 * 60 * 60, JSON.stringify(result) ); - console.log(`[HOMEPAGE] Cached top podcasts for 24 hours`); + logger.debug(`[HOMEPAGE] Cached top podcasts for 24 hours`); } catch (cacheError) { - console.warn("[HOMEPAGE] Redis cache write error:", cacheError); + logger.warn("[HOMEPAGE] Redis cache write error:", cacheError); } res.json(result); } catch (error) { - console.error("Get top podcasts error:", error); + logger.error("Get top podcasts error:", error); res.status(500).json({ error: "Failed to fetch top podcasts" }); } }); diff --git a/backend/src/routes/library.ts b/backend/src/routes/library.ts index 11ac41f..d8fb6c3 100644 --- a/backend/src/routes/library.ts +++ b/backend/src/routes/library.ts @@ -2,9 +2,10 @@ import { Router, Response } from "express"; import { requireAuth, requireAuthOrToken } from "../middleware/auth"; import { imageLimiter, apiLimiter } from "../middleware/rateLimiter"; import { lastFmService } from "../services/lastfm"; -import { prisma } from "../utils/db"; +import { prisma, Prisma } from "../utils/db"; import { getEnrichmentProgress } from "../workers/enrichment"; import { redisClient } from "../utils/redis"; +import { logger } from "../utils/logger"; import crypto from "crypto"; import path from "path"; import fs from "fs"; @@ -22,6 +23,15 @@ import { 
organizeSingles } from "../workers/organizeSingles"; import { enrichSimilarArtist } from "../workers/artistEnrichment"; import { extractColorsFromImage } from "../utils/colorExtractor"; import { dataCacheService } from "../services/dataCache"; +import { + getMergedGenres, + getArtistDisplaySummary, +} from "../utils/metadataOverrides"; +import { + getEffectiveYear, + getDecadeWhereClause, + getDecadeFromYear, +} from "../utils/dateFilters"; const router = Router(); @@ -105,12 +115,12 @@ router.post("/scan", async (req, res) => { const { organizeSingles } = await import( "../workers/organizeSingles" ); - console.log("[Scan] Organizing SLSKD downloads before scan..."); + logger.info("[Scan] Organizing SLSKD downloads before scan..."); await organizeSingles(); - console.log("[Scan] SLSKD organization complete"); + logger.info("[Scan] SLSKD organization complete"); } catch (err: any) { // Not a fatal error - SLSKD might not be running or have no files - console.log("[Scan] SLSKD organization skipped:", err.message); + logger.info("[Scan] SLSKD organization skipped:", err.message); } const userId = req.user?.id || "system"; @@ -127,7 +137,7 @@ router.post("/scan", async (req, res) => { musicPath: config.music.musicPath, }); } catch (error) { - console.error("Scan trigger error:", error); + logger.error("Scan trigger error:", error); res.status(500).json({ error: "Failed to start scan" }); } }); @@ -151,7 +161,7 @@ router.get("/scan/status/:jobId", async (req, res) => { result, }); } catch (error) { - console.error("Get scan status error:", error); + logger.error("Get scan status error:", error); res.status(500).json({ error: "Failed to get job status" }); } }); @@ -161,12 +171,12 @@ router.post("/organize", async (req, res) => { try { // Run in background organizeSingles().catch((err) => { - console.error("Manual organization failed:", err); + logger.error("Manual organization failed:", err); }); res.json({ message: "Organization started in background" }); } catch 
(error) { - console.error("Organization trigger error:", error); + logger.error("Organization trigger error:", error); res.status(500).json({ error: "Failed to start organization" }); } }); @@ -186,12 +196,12 @@ router.post("/artists/:id/enrich", async (req, res) => { // Run enrichment in background enrichSimilarArtist(artist).catch((err) => { - console.error(`Failed to enrich artist ${artist.name}:`, err); + logger.error(`Failed to enrich artist ${artist.name}:`, err); }); res.json({ message: "Artist enrichment started in background" }); } catch (error) { - console.error("Enrich artist error:", error); + logger.error("Enrich artist error:", error); res.status(500).json({ error: "Failed to enrich artist" }); } }); @@ -202,7 +212,7 @@ router.get("/enrichment-progress", async (req, res) => { const progress = await getEnrichmentProgress(); res.json(progress); } catch (error) { - console.error("Failed to get enrichment progress:", error); + logger.error("Failed to get enrichment progress:", error); res.status(500).json({ error: "Failed to get enrichment progress" }); } }); @@ -221,7 +231,7 @@ router.post("/re-enrich-all", async (req, res) => { }, }); - console.log( + logger.debug( ` Reset ${result.count} artists with missing images to pending` ); @@ -230,7 +240,7 @@ router.post("/re-enrich-all", async (req, res) => { count: result.count, }); } catch (error) { - console.error("Failed to reset artists:", error); + logger.error("Failed to reset artists:", error); res.status(500).json({ error: "Failed to reset artists" }); } }); @@ -269,6 +279,7 @@ router.get("/recently-listened", async (req, res) => { mbid: true, name: true, heroUrl: true, + userHeroUrl: true, }, }, }, @@ -283,7 +294,7 @@ router.get("/recently-listened", async (req, res) => { isFinished: false, currentTime: { gt: 0 }, // Only show if actually started }, - orderBy: { lastPlayedAt: "desc" }, + orderBy: { lastPlayedAt: Prisma.SortOrder.desc }, take: Math.ceil(limitNum / 3), // Get up to 1/3 for audiobooks 
}), prisma.podcastProgress.findMany({ @@ -292,7 +303,7 @@ router.get("/recently-listened", async (req, res) => { isFinished: false, currentTime: { gt: 0 }, // Only show if actually started }, - orderBy: { lastPlayedAt: "desc" }, + orderBy: { lastPlayedAt: Prisma.SortOrder.desc }, take: limitNum * 2, // Get extra to account for deduplication include: { episode: { @@ -406,11 +417,12 @@ router.get("/recently-listened", async (req, res) => { if (item.type === "audiobook" || item.type === "podcast") { return item; } else { - let coverArt = item.heroUrl; + // Use override pattern: userHeroUrl ?? heroUrl + let coverArt = item.userHeroUrl ?? item.heroUrl; // Fetch image on-demand if missing if (!coverArt) { - console.log( + logger.debug( `[IMAGE] Fetching image on-demand for ${item.name}...` ); @@ -420,7 +432,7 @@ router.get("/recently-listened", async (req, res) => { const cached = await redisClient.get(cacheKey); if (cached) { coverArt = cached; - console.log(` Found cached image`); + logger.debug(` Found cached image`); } } catch (err) { // Redis errors are non-critical @@ -481,7 +493,7 @@ router.get("/recently-listened", async (req, res) => { if (largestImage && largestImage["#text"]) { coverArt = largestImage["#text"]; - console.log(` Found Last.fm image`); + logger.debug(` Found Last.fm image`); } } } catch (err) { @@ -497,7 +509,7 @@ router.get("/recently-listened", async (req, res) => { 7 * 24 * 60 * 60, coverArt ); - console.log(` Cached image for 7 days`); + logger.debug(` Cached image for 7 days`); } catch (err) { // Redis errors are non-critical } @@ -515,7 +527,7 @@ router.get("/recently-listened", async (req, res) => { res.json({ items: results }); } catch (error) { - console.error("Get recently listened error:", error); + logger.error("Get recently listened error:", error); res.status(500).json({ error: "Failed to fetch recently listened" }); } }); @@ -542,6 +554,7 @@ router.get("/recently-added", async (req, res) => { mbid: true, name: true, heroUrl: true, 
+ userHeroUrl: true, }, }, }, @@ -575,10 +588,11 @@ router.get("/recently-added", async (req, res) => { // For artists without heroUrl, fetch images on-demand const artistsWithImages = await Promise.all( Array.from(artistsMap.values()).map(async (artist) => { - let coverArt = artist.heroUrl; + // Use override pattern: userHeroUrl ?? heroUrl + let coverArt = artist.userHeroUrl ?? artist.heroUrl; if (!coverArt) { - console.log( + logger.debug( `[IMAGE] Fetching image on-demand for ${artist.name}...` ); @@ -588,7 +602,7 @@ router.get("/recently-added", async (req, res) => { const cached = await redisClient.get(cacheKey); if (cached) { coverArt = cached; - console.log(` Found cached image`); + logger.debug(` Found cached image`); } } catch (err) { // Redis errors are non-critical @@ -649,7 +663,7 @@ router.get("/recently-added", async (req, res) => { if (largestImage && largestImage["#text"]) { coverArt = largestImage["#text"]; - console.log(` Found Last.fm image`); + logger.debug(` Found Last.fm image`); } } } catch (err) { @@ -665,7 +679,7 @@ router.get("/recently-added", async (req, res) => { 7 * 24 * 60 * 60, coverArt ); - console.log(` Cached image for 7 days`); + logger.debug(` Cached image for 7 days`); } catch (err) { // Redis errors are non-critical } @@ -682,7 +696,7 @@ router.get("/recently-added", async (req, res) => { res.json({ artists: artistsWithImages }); } catch (error) { - console.error("Get recently added error:", error); + logger.error("Get recently added error:", error); res.status(500).json({ error: "Failed to fetch recently added" }); } }); @@ -696,7 +710,10 @@ router.get("/artists", async (req, res) => { offset: offsetParam = "0", filter = "owned", // owned (default), discovery, all } = req.query; - const limit = Math.min(parseInt(limitParam as string, 10) || 500, MAX_LIMIT); + const limit = Math.min( + parseInt(limitParam as string, 10) || 500, + MAX_LIMIT + ); const offset = parseInt(offsetParam as string, 10) || 0; // Build where clause 
based on filter @@ -784,6 +801,7 @@ router.get("/artists", async (req, res) => { mbid: true, name: true, heroUrl: true, + userHeroUrl: true, albums: { where: { ...(albumLocationFilter @@ -805,11 +823,131 @@ router.get("/artists", async (req, res) => { // Use DataCacheService for batch image lookup (DB + Redis, no API calls for lists) const imageMap = await dataCacheService.getArtistImagesBatch( - artistsWithAlbums.map((a) => ({ id: a.id, heroUrl: a.heroUrl })) + artistsWithAlbums.map((a) => ({ id: a.id, heroUrl: a.heroUrl, userHeroUrl: a.userHeroUrl })) + ); + + // ========== ON-DEMAND IMAGE FETCHING FOR LIBRARY ARTISTS ========== + // For artists without images, fetch on-demand (fixes Bug 2: Artist images missing on Library page) + const artistsWithoutImages = artistsWithAlbums.filter( + (artist) => !imageMap.get(artist.id) && !artist.heroUrl + ); + + logger.debug( + `[Library] Found ${artistsWithoutImages.length} artists without images, fetching on-demand...` + ); + + // Fetch images with concurrency limit of 5 simultaneous requests + const imageFetchPromises = artistsWithoutImages.map(async (artist) => { + let coverArt: string | null = null; + + logger.debug( + `[IMAGE] Fetching image on-demand for ${artist.name}...` + ); + + // Check Redis cache first + const cacheKey = `hero-image:${artist.id}`; + try { + const cached = await redisClient.get(cacheKey); + if (cached) { + coverArt = cached; + logger.debug(` Found cached image`); + return { artistId: artist.id, coverArt }; + } + } catch (err) { + // Redis errors are non-critical + } + + // Try Fanart.tv if we have real MBID + if (!coverArt && artist.mbid && !artist.mbid.startsWith("temp-")) { + try { + coverArt = await fanartService.getArtistImage(artist.mbid); + } catch (err) { + // Fanart.tv failed, continue to next source + } + } + + // Fallback to Deezer + if (!coverArt) { + try { + coverArt = await deezerService.getArtistImage(artist.name); + } catch (err) { + // Deezer failed, continue to next source + } + 
} + + // Fallback to Last.fm + if (!coverArt) { + try { + const validMbid = + artist.mbid && !artist.mbid.startsWith("temp-") + ? artist.mbid + : undefined; + const lastfmInfo = await lastFmService.getArtistInfo( + artist.name, + validMbid + ); + + if (lastfmInfo.image && lastfmInfo.image.length > 0) { + const largestImage = + lastfmInfo.image.find( + (img: any) => + img.size === "extralarge" || + img.size === "mega" + ) || + lastfmInfo.image[lastfmInfo.image.length - 1]; + + if (largestImage && largestImage["#text"]) { + coverArt = largestImage["#text"]; + logger.debug(` Found Last.fm image`); + } + } + } catch (err) { + // Last.fm failed, leave as null + } + } + + // Cache the result for 7 days + if (coverArt) { + try { + await redisClient.setEx( + cacheKey, + 7 * 24 * 60 * 60, + coverArt + ); + logger.debug(` Cached image for 7 days`); + } catch (err) { + // Redis errors are non-critical + } + } + + return { artistId: artist.id, coverArt }; + }); + + // Process in batches of 5 for concurrency control + const batchSize = 5; + const fetchedImages = new Map(); + + for (let i = 0; i < imageFetchPromises.length; i += batchSize) { + const batch = imageFetchPromises.slice(i, i + batchSize); + const results = await Promise.allSettled(batch); + + results.forEach((result) => { + if (result.status === "fulfilled" && result.value.coverArt) { + fetchedImages.set(result.value.artistId, result.value.coverArt); + } + }); + } + + logger.debug( + `[Library] Fetched ${fetchedImages.size} new images on-demand` ); const artistsWithImages = artistsWithAlbums.map((artist) => { - const coverArt = imageMap.get(artist.id) || artist.heroUrl || null; + const coverArt = + fetchedImages.get(artist.id) || + imageMap.get(artist.id) || + artist.heroUrl || + null; // Sum up track counts from all albums const trackCount = artist.albums.reduce( (sum, album) => sum + (album._count?.tracks || 0), @@ -833,8 +971,8 @@ router.get("/artists", async (req, res) => { limit, }); } catch (error: any) { - 
console.error("[Library] Get artists error:", error?.message || error); - console.error("[Library] Stack:", error?.stack); + logger.error("[Library] Get artists error:", error?.message || error); + logger.error("[Library] Stack:", error?.stack); res.status(500).json({ error: "Failed to fetch artists", details: error?.message, @@ -925,7 +1063,7 @@ router.get("/enrichment-diagnostics", async (req, res) => { ].filter(Boolean), }); } catch (error: any) { - console.error( + logger.error( "[Library] Enrichment diagnostics error:", error?.message ); @@ -947,11 +1085,58 @@ router.post("/retry-enrichment", async (req, res) => { count: result.count, }); } catch (error: any) { - console.error("[Library] Retry enrichment error:", error?.message); + logger.error("[Library] Retry enrichment error:", error?.message); res.status(500).json({ error: "Failed to retry enrichment" }); } }); +// POST /library/backfill-genres - Backfill genres for artists missing them +router.post("/backfill-genres", async (req, res) => { + try { + // Find artists that have been enriched but have no genres + const artistsToBackfill = await prisma.artist.findMany({ + where: { + enrichmentStatus: "completed", + OR: [ + { genres: { equals: Prisma.DbNull } }, + { genres: { equals: [] } }, + ], + }, + select: { id: true, name: true, mbid: true }, + take: 50, // Process in batches + }); + + if (artistsToBackfill.length === 0) { + return res.json({ + message: "No artists need genre backfill", + count: 0, + }); + } + + // Reset these artists to pending so enrichment worker re-processes them + const result = await prisma.artist.updateMany({ + where: { + id: { in: artistsToBackfill.map(a => a.id) }, + }, + data: { + enrichmentStatus: "pending", + lastEnriched: null, + }, + }); + + logger.info(`[Backfill] Reset ${result.count} artists for genre enrichment`); + + res.json({ + message: `Reset ${result.count} artists for genre enrichment`, + count: result.count, + artists: artistsToBackfill.map(a => a.name).slice(0, 
10), + }); + } catch (error: any) { + logger.error("[Backfill] Genre backfill error:", error?.message); + res.status(500).json({ error: "Failed to backfill genres" }); + } +}); + // GET /library/artists/:id router.get("/artists/:id", async (req, res) => { try { @@ -959,10 +1144,10 @@ router.get("/artists/:id", async (req, res) => { const artistInclude = { albums: { - orderBy: { year: "desc" }, + orderBy: { year: Prisma.SortOrder.desc }, include: { tracks: { - orderBy: { trackNo: "asc" }, + orderBy: { trackNo: Prisma.SortOrder.asc }, take: 10, // Top tracks include: { album: { @@ -1029,7 +1214,7 @@ router.get("/artists/:id", async (req, res) => { // If artist has temp MBID, try to find real MBID by searching MusicBrainz let effectiveMbid = artist.mbid; if (!effectiveMbid || effectiveMbid.startsWith("temp-")) { - console.log( + logger.debug( ` Artist has temp/no MBID, searching MusicBrainz for ${artist.name}...` ); try { @@ -1039,7 +1224,7 @@ router.get("/artists/:id", async (req, res) => { ); if (searchResults.length > 0) { effectiveMbid = searchResults[0].id; - console.log(` Found MBID: ${effectiveMbid}`); + logger.debug(` Found MBID: ${effectiveMbid}`); // Update database with real MBID for future use (skip if duplicate) try { @@ -1050,23 +1235,23 @@ router.get("/artists/:id", async (req, res) => { } catch (mbidError: any) { // If MBID already exists for another artist, just log and continue if (mbidError.code === "P2002") { - console.log( + logger.debug( `MBID ${effectiveMbid} already exists for another artist, skipping update` ); } else { - console.error( + logger.error( ` ✗ Failed to update MBID:`, mbidError ); } } } else { - console.log( + logger.debug( ` ✗ No MusicBrainz match found for ${artist.name}` ); } } catch (error) { - console.error(` ✗ MusicBrainz search failed:`, error); + logger.error(` MusicBrainz search failed:`, error); } } @@ -1079,7 +1264,7 @@ router.get("/artists/:id", async (req, res) => { source: "database" as const, })); - console.log( + 
logger.debug( `[Artist] Found ${dbAlbums.length} albums from database (actual owned files)` ); @@ -1098,11 +1283,11 @@ router.get("/artists/:id", async (req, res) => { const cachedDisco = await redisClient.get(discoCacheKey); if (cachedDisco && cachedDisco !== "NOT_FOUND") { releaseGroups = JSON.parse(cachedDisco); - console.log( + logger.debug( `[Artist] Using cached discography (${releaseGroups.length} albums)` ); } else { - console.log( + logger.debug( `[Artist] Fetching discography from MusicBrainz...` ); releaseGroups = await musicBrainzService.getReleaseGroups( @@ -1118,7 +1303,7 @@ router.get("/artists/:id", async (req, res) => { ); } - console.log( + logger.debug( ` Got ${releaseGroups.length} albums from MusicBrainz (before filtering)` ); @@ -1153,7 +1338,7 @@ router.get("/artists/:id", async (req, res) => { } ); - console.log( + logger.debug( ` Filtered to ${filteredReleaseGroups.length} studio albums/EPs` ); @@ -1208,10 +1393,10 @@ router.get("/artists/:id", async (req, res) => { albumsWithOwnership = [...dbAlbums, ...mbAlbumsFiltered]; - console.log( + logger.debug( ` Total albums: ${albumsWithOwnership.length} (${dbAlbums.length} owned from database, ${mbAlbumsFiltered.length} from MusicBrainz)` ); - console.log( + logger.debug( ` Owned: ${ albumsWithOwnership.filter((a) => a.owned).length }, Available: ${ @@ -1219,7 +1404,7 @@ router.get("/artists/:id", async (req, res) => { }` ); } catch (error) { - console.error( + logger.error( `Failed to fetch MusicBrainz discography:`, error ); @@ -1228,7 +1413,7 @@ router.get("/artists/:id", async (req, res) => { } } else { // No valid MBID - just use database albums - console.log( + logger.debug( `[Artist] No valid MBID, using ${dbAlbums.length} albums from database` ); albumsWithOwnership = dbAlbums; @@ -1236,9 +1421,7 @@ router.get("/artists/:id", async (req, res) => { // Extract top tracks from library first const allTracks = artist.albums.flatMap((a) => a.tracks); - let topTracks = allTracks - .sort((a, b) 
=> (b.playCount || 0) - (a.playCount || 0)) - .slice(0, 10); + let topTracks = allTracks.slice(0, 10); // Get user play counts for all tracks const userId = req.user!.id; @@ -1266,7 +1449,7 @@ router.get("/artists/:id", async (req, res) => { if (cachedTopTracks && cachedTopTracks !== "NOT_FOUND") { lastfmTopTracks = JSON.parse(cachedTopTracks); - console.log( + logger.debug( `[Artist] Using cached top tracks (${lastfmTopTracks.length})` ); } else { @@ -1286,7 +1469,7 @@ router.get("/artists/:id", async (req, res) => { 24 * 60 * 60, JSON.stringify(lastfmTopTracks) ); - console.log( + logger.debug( `[Artist] Cached ${lastfmTopTracks.length} top tracks` ); } @@ -1306,7 +1489,7 @@ router.get("/artists/:id", async (req, res) => { ...matchedTrack, playCount: lfmTrack.playcount ? parseInt(lfmTrack.playcount) - : matchedTrack.playCount, + : 0, listeners: lfmTrack.listeners ? parseInt(lfmTrack.listeners) : 0, @@ -1344,7 +1527,7 @@ router.get("/artists/:id", async (req, res) => { topTracks = combinedTracks.slice(0, 10); } catch (error) { - console.error( + logger.error( `Failed to get Last.fm top tracks for ${artist.name}:`, error ); @@ -1380,20 +1563,27 @@ router.get("/artists/:id", async (req, res) => { if (enrichedSimilar && enrichedSimilar.length > 0) { // Use pre-enriched data from database (fast path) - console.log( + logger.debug( `[Artist] Using ${enrichedSimilar.length} similar artists from enriched JSON` ); // First, batch lookup which similar artists exist in our library - const similarNames = enrichedSimilar.slice(0, 10).map((s) => s.name.toLowerCase()); - const similarMbids = enrichedSimilar.slice(0, 10).map((s) => s.mbid).filter(Boolean) as string[]; - + const similarNames = enrichedSimilar + .slice(0, 10) + .map((s) => s.name.toLowerCase()); + const similarMbids = enrichedSimilar + .slice(0, 10) + .map((s) => s.mbid) + .filter(Boolean) as string[]; + // Find library artists matching by name or mbid const libraryMatches = await prisma.artist.findMany({ where: { 
OR: [ { normalizedName: { in: similarNames } }, - ...(similarMbids.length > 0 ? [{ mbid: { in: similarMbids } }] : []), + ...(similarMbids.length > 0 + ? [{ mbid: { in: similarMbids } }] + : []), ], }, select: { @@ -1405,25 +1595,37 @@ router.get("/artists/:id", async (req, res) => { _count: { select: { albums: { - where: { location: "LIBRARY", tracks: { some: {} } }, + where: { + location: "LIBRARY", + tracks: { some: {} }, + }, }, }, }, }, }); - + // Create lookup maps for quick matching - const libraryByName = new Map(libraryMatches.map((a) => [a.normalizedName?.toLowerCase() || a.name.toLowerCase(), a])); - const libraryByMbid = new Map(libraryMatches.filter((a) => a.mbid).map((a) => [a.mbid!, a])); + const libraryByName = new Map( + libraryMatches.map((a) => [ + a.normalizedName?.toLowerCase() || a.name.toLowerCase(), + a, + ]) + ); + const libraryByMbid = new Map( + libraryMatches.filter((a) => a.mbid).map((a) => [a.mbid!, a]) + ); // Fetch images in parallel from Deezer (cached in Redis) const similarWithImages = await Promise.all( enrichedSimilar.slice(0, 10).map(async (s) => { // Check if this artist is in our library - const libraryArtist = (s.mbid && libraryByMbid.get(s.mbid)) || libraryByName.get(s.name.toLowerCase()); - + const libraryArtist = + (s.mbid && libraryByMbid.get(s.mbid)) || + libraryByName.get(s.name.toLowerCase()); + let image = libraryArtist?.heroUrl || null; - + // If no library image, try Deezer if (!image) { try { @@ -1433,7 +1635,9 @@ router.get("/artists/:id", async (req, res) => { if (cached && cached !== "NOT_FOUND") { image = cached; } else { - image = await deezerService.getArtistImage(s.name); + image = await deezerService.getArtistImage( + s.name + ); if (image) { await redisClient.setEx( cacheKey, @@ -1466,12 +1670,12 @@ router.get("/artists/:id", async (req, res) => { const cachedSimilar = await redisClient.get(similarCacheKey); if (cachedSimilar && cachedSimilar !== "NOT_FOUND") { similarArtists = JSON.parse(cachedSimilar); 
- console.log( + logger.debug( `[Artist] Using cached similar artists (${similarArtists.length})` ); } else { // Cache miss - fetch from Last.fm - console.log( + logger.debug( `[Artist] Fetching similar artists from Last.fm...` ); @@ -1479,7 +1683,7 @@ router.get("/artists/:id", async (req, res) => { const validMbid = effectiveMbid && !effectiveMbid.startsWith("temp-") ? effectiveMbid - : undefined; + : ""; const lastfmSimilar = await lastFmService.getSimilarArtists( validMbid, artist.name, @@ -1487,14 +1691,20 @@ router.get("/artists/:id", async (req, res) => { ); // Batch lookup which similar artists exist in our library - const similarNames = lastfmSimilar.map((s: any) => s.name.toLowerCase()); - const similarMbids = lastfmSimilar.map((s: any) => s.mbid).filter(Boolean) as string[]; - + const similarNames = lastfmSimilar.map((s: any) => + s.name.toLowerCase() + ); + const similarMbids = lastfmSimilar + .map((s: any) => s.mbid) + .filter(Boolean) as string[]; + const libraryMatches = await prisma.artist.findMany({ where: { OR: [ { normalizedName: { in: similarNames } }, - ...(similarMbids.length > 0 ? [{ mbid: { in: similarMbids } }] : []), + ...(similarMbids.length > 0 + ? 
[{ mbid: { in: similarMbids } }] + : []), ], }, select: { @@ -1506,23 +1716,38 @@ router.get("/artists/:id", async (req, res) => { _count: { select: { albums: { - where: { location: "LIBRARY", tracks: { some: {} } }, + where: { + location: "LIBRARY", + tracks: { some: {} }, + }, }, }, }, }, }); - - const libraryByName = new Map(libraryMatches.map((a) => [a.normalizedName?.toLowerCase() || a.name.toLowerCase(), a])); - const libraryByMbid = new Map(libraryMatches.filter((a) => a.mbid).map((a) => [a.mbid!, a])); + + const libraryByName = new Map( + libraryMatches.map((a) => [ + a.normalizedName?.toLowerCase() || + a.name.toLowerCase(), + a, + ]) + ); + const libraryByMbid = new Map( + libraryMatches + .filter((a) => a.mbid) + .map((a) => [a.mbid!, a]) + ); // Fetch images in parallel (Deezer only - fastest source) const similarWithImages = await Promise.all( lastfmSimilar.map(async (s: any) => { - const libraryArtist = (s.mbid && libraryByMbid.get(s.mbid)) || libraryByName.get(s.name.toLowerCase()); - + const libraryArtist = + (s.mbid && libraryByMbid.get(s.mbid)) || + libraryByName.get(s.name.toLowerCase()); + let image = libraryArtist?.heroUrl || null; - + if (!image) { try { image = await deezerService.getArtistImage( @@ -1539,7 +1764,8 @@ router.get("/artists/:id", async (req, res) => { mbid: s.mbid || null, coverArt: image, albumCount: 0, - ownedAlbumCount: libraryArtist?._count?.albums || 0, + ownedAlbumCount: + libraryArtist?._count?.albums || 0, weight: s.match, inLibrary: !!libraryArtist, }; @@ -1554,11 +1780,11 @@ router.get("/artists/:id", async (req, res) => { 24 * 60 * 60, JSON.stringify(similarArtists) ); - console.log( + logger.debug( `[Artist] Cached ${similarArtists.length} similar artists` ); } catch (error) { - console.error( + logger.error( `[Artist] Failed to fetch similar artists:`, error ); @@ -1570,12 +1796,14 @@ router.get("/artists/:id", async (req, res) => { res.json({ ...artist, coverArt: heroUrl, // Use fetched hero image (falls back to 
artist.heroUrl) + bio: getArtistDisplaySummary(artist), + genres: getMergedGenres(artist), albums: albumsWithOwnership, topTracks, similarArtists, }); } catch (error) { - console.error("Get artist error:", error); + logger.error("Get artist error:", error); res.status(500).json({ error: "Failed to fetch artist" }); } }); @@ -1589,7 +1817,10 @@ router.get("/albums", async (req, res) => { offset: offsetParam = "0", filter = "owned", // owned (default), discovery, all } = req.query; - const limit = Math.min(parseInt(limitParam as string, 10) || 500, MAX_LIMIT); + const limit = Math.min( + parseInt(limitParam as string, 10) || 500, + MAX_LIMIT + ); const offset = parseInt(offsetParam as string, 10) || 0; let where: any = { @@ -1658,8 +1889,8 @@ router.get("/albums", async (req, res) => { limit, }); } catch (error: any) { - console.error("[Library] Get albums error:", error?.message || error); - console.error("[Library] Stack:", error?.stack); + logger.error("[Library] Get albums error:", error?.message || error); + logger.error("[Library] Stack:", error?.stack); res.status(500).json({ error: "Failed to fetch albums", details: error?.message, @@ -1684,7 +1915,7 @@ router.get("/albums/:id", async (req, res) => { }, }, tracks: { - orderBy: { trackNo: "asc" }, + orderBy: { trackNo: Prisma.SortOrder.asc }, }, }, }); @@ -1702,7 +1933,7 @@ router.get("/albums/:id", async (req, res) => { }, }, tracks: { - orderBy: { trackNo: "asc" }, + orderBy: { trackNo: Prisma.SortOrder.asc }, }, }, }); @@ -1728,7 +1959,7 @@ router.get("/albums/:id", async (req, res) => { coverArt: album.coverUrl, }); } catch (error) { - console.error("Get album error:", error); + logger.error("Get album error:", error); res.status(500).json({ error: "Failed to fetch album" }); } }); @@ -1736,8 +1967,15 @@ router.get("/albums/:id", async (req, res) => { // GET /library/tracks?albumId=&limit=100&offset=0 router.get("/tracks", async (req, res) => { try { - const { albumId, limit: limitParam = "100", offset: 
offsetParam = "0" } = req.query; - const limit = Math.min(parseInt(limitParam as string, 10) || 100, MAX_LIMIT); + const { + albumId, + limit: limitParam = "100", + offset: offsetParam = "0", + } = req.query; + const limit = Math.min( + parseInt(limitParam as string, 10) || 100, + MAX_LIMIT + ); const offset = parseInt(offsetParam as string, 10) || 0; const where: any = {}; @@ -1778,7 +2016,7 @@ router.get("/tracks", async (req, res) => { res.json({ tracks, total, offset, limit }); } catch (error) { - console.error("Get tracks error:", error); + logger.error("Get tracks error:", error); res.status(500).json({ error: "Failed to fetch tracks" }); } }); @@ -1787,7 +2025,10 @@ router.get("/tracks", async (req, res) => { router.get("/tracks/shuffle", async (req, res) => { try { const { limit: limitParam = "100" } = req.query; - const limit = Math.min(parseInt(limitParam as string, 10) || 100, MAX_LIMIT); + const limit = Math.min( + parseInt(limitParam as string, 10) || 100, + MAX_LIMIT + ); // Get total count of tracks const totalTracks = await prisma.track.count(); @@ -1797,7 +2038,7 @@ router.get("/tracks/shuffle", async (req, res) => { } // For small libraries, fetch all and shuffle in memory - // For large libraries, use random offset sampling for better performance + // For large libraries, use database-level randomization for memory efficiency let tracksData; if (totalTracks <= limit) { // Fetch all tracks and shuffle @@ -1821,21 +2062,19 @@ router.get("/tracks/shuffle", async (req, res) => { [tracksData[i], tracksData[j]] = [tracksData[j], tracksData[i]]; } } else { - // For large libraries, sample random tracks using multiple random offsets - // This provides good randomization without loading entire library - const sampleSize = Math.min(limit, totalTracks); - const offsets = new Set(); + // For large libraries, use database-level randomization + // Get random track IDs first (efficient, O(limit) memory) + const randomIds = await prisma.$queryRaw<{ id: string 
}[]>` + SELECT id FROM "Track" + ORDER BY RANDOM() + LIMIT ${limit} + `; - // Generate unique random offsets - while (offsets.size < sampleSize) { - offsets.add(Math.floor(Math.random() * totalTracks)); - } - - // Fetch tracks at random offsets (batch for efficiency) - const offsetArray = Array.from(offsets); + // Then fetch full track data for selected IDs tracksData = await prisma.track.findMany({ - skip: 0, - take: totalTracks, // We'll filter by our offsets + where: { + id: { in: randomIds.map((r) => r.id) }, + }, include: { album: { include: { @@ -1850,11 +2089,7 @@ router.get("/tracks/shuffle", async (req, res) => { }, }); - // Pick tracks at our random indices and shuffle - const selectedTracks = offsetArray.map(idx => tracksData[idx]).filter(Boolean); - tracksData = selectedTracks; - - // Fisher-Yates shuffle + // Shuffle the result to maintain randomness (findMany doesn't preserve order) for (let i = tracksData.length - 1; i > 0; i--) { const j = Math.floor(Math.random() * (i + 1)); [tracksData[i], tracksData[j]] = [tracksData[j], tracksData[i]]; @@ -1872,7 +2107,7 @@ router.get("/tracks/shuffle", async (req, res) => { res.json({ tracks, total: totalTracks }); } catch (error) { - console.error("Shuffle tracks error:", error); + logger.error("Shuffle tracks error:", error); res.status(500).json({ error: "Failed to shuffle tracks" }); } }); @@ -1912,7 +2147,7 @@ router.get("/cover-art/:id?", imageLimiter, async (req, res) => { coverUrl = `${audiobookshelfBaseUrl}/api/${audiobookPath}`; // Fetch with authentication - console.log( + logger.debug( `[COVER-ART] Fetching audiobook cover: ${coverUrl.substring( 0, 100 @@ -1926,7 +2161,7 @@ router.get("/cover-art/:id?", imageLimiter, async (req, res) => { }); if (!imageResponse.ok) { - console.error( + logger.error( `[COVER-ART] Failed to fetch audiobook cover: ${coverUrl} (${imageResponse.status} ${imageResponse.statusText})` ); return res @@ -1963,13 +2198,13 @@ router.get("/cover-art/:id?", imageLimiter, async 
(req, res) => { nativePath ); - console.log( + logger.debug( `[COVER-ART] Serving native cover: ${coverCachePath}` ); // Check if file exists if (!fs.existsSync(coverCachePath)) { - console.error( + logger.error( `[COVER-ART] Native cover not found: ${coverCachePath}` ); return res @@ -2040,7 +2275,7 @@ router.get("/cover-art/:id?", imageLimiter, async (req, res) => { } // Native cover file missing - try to find album and fetch from Deezer - console.warn( + logger.warn( `[COVER-ART] Native cover not found: ${coverCachePath}, trying Deezer fallback` ); @@ -2070,7 +2305,7 @@ router.get("/cover-art/:id?", imageLimiter, async (req, res) => { } } } catch (error) { - console.error( + logger.error( `[COVER-ART] Failed to fetch Deezer fallback for ${albumId}:`, error ); @@ -2102,7 +2337,7 @@ router.get("/cover-art/:id?", imageLimiter, async (req, res) => { coverUrl = `${audiobookshelfBaseUrl}/api/${audiobookPath}`; // Fetch with authentication - console.log( + logger.debug( `[COVER-ART] Fetching audiobook cover: ${coverUrl.substring( 0, 100 @@ -2116,7 +2351,7 @@ router.get("/cover-art/:id?", imageLimiter, async (req, res) => { }); if (!imageResponse.ok) { - console.error( + logger.error( `[COVER-ART] Failed to fetch audiobook cover: ${coverUrl} (${imageResponse.status} ${imageResponse.statusText})` ); return res @@ -2170,7 +2405,7 @@ router.get("/cover-art/:id?", imageLimiter, async (req, res) => { // Check if this is a cached 404 if (cachedData.notFound) { - console.log( + logger.debug( `[COVER-ART] Cached 404 for ${coverUrl.substring( 0, 60 @@ -2181,14 +2416,14 @@ router.get("/cover-art/:id?", imageLimiter, async (req, res) => { .json({ error: "Cover art not found" }); } - console.log( + logger.debug( `[COVER-ART] Cache HIT for ${coverUrl.substring(0, 60)}...` ); const imageBuffer = Buffer.from(cachedData.data, "base64"); // Check if client has cached version if (req.headers["if-none-match"] === cachedData.etag) { - console.log(`[COVER-ART] Client has cached version 
(304)`); + logger.debug(`[COVER-ART] Client has cached version (304)`); return res.status(304).end(); } @@ -2207,7 +2442,7 @@ router.get("/cover-art/:id?", imageLimiter, async (req, res) => { res.setHeader("ETag", cachedData.etag); return res.send(imageBuffer); } else { - console.log( + logger.debug( `[COVER-ART] ✗ Cache MISS for ${coverUrl.substring( 0, 60 @@ -2215,18 +2450,18 @@ router.get("/cover-art/:id?", imageLimiter, async (req, res) => { ); } } catch (cacheError) { - console.warn("[COVER-ART] Redis cache read error:", cacheError); + logger.warn("[COVER-ART] Redis cache read error:", cacheError); } // Fetch the image and proxy it to avoid CORS issues - console.log(`[COVER-ART] Fetching: ${coverUrl.substring(0, 100)}...`); + logger.debug(`[COVER-ART] Fetching: ${coverUrl.substring(0, 100)}...`); const imageResponse = await fetch(coverUrl, { headers: { "User-Agent": "Lidify/1.0", }, }); if (!imageResponse.ok) { - console.error( + logger.error( `[COVER-ART] Failed to fetch: ${coverUrl} (${imageResponse.status} ${imageResponse.statusText})` ); @@ -2238,9 +2473,9 @@ router.get("/cover-art/:id?", imageLimiter, async (req, res) => { 60 * 60, // 1 hour JSON.stringify({ notFound: true }) ); - console.log(`[COVER-ART] Cached 404 response for 1 hour`); + logger.debug(`[COVER-ART] Cached 404 response for 1 hour`); } catch (cacheError) { - console.warn( + logger.warn( "[COVER-ART] Redis cache write error:", cacheError ); @@ -2249,7 +2484,7 @@ router.get("/cover-art/:id?", imageLimiter, async (req, res) => { return res.status(404).json({ error: "Cover art not found" }); } - console.log(`[COVER-ART] Successfully fetched, caching...`); + logger.debug(`[COVER-ART] Successfully fetched, caching...`); const buffer = await imageResponse.arrayBuffer(); const imageBuffer = Buffer.from(buffer); @@ -2270,7 +2505,7 @@ router.get("/cover-art/:id?", imageLimiter, async (req, res) => { }) ); } catch (cacheError) { - console.warn("Redis cache write error:", cacheError); + 
logger.warn("Redis cache write error:", cacheError); } // Check if client has cached version @@ -2292,7 +2527,7 @@ router.get("/cover-art/:id?", imageLimiter, async (req, res) => { // Send the image res.send(imageBuffer); } catch (error) { - console.error("Get cover art error:", error); + logger.error("Get cover art error:", error); res.status(500).json({ error: "Failed to fetch cover art" }); } }); @@ -2318,7 +2553,7 @@ router.get("/album-cover/:mbid", imageLimiter, async (req, res) => { res.json({ coverUrl }); } catch (error) { - console.error("Get album cover error:", error); + logger.error("Get album cover error:", error); res.status(500).json({ error: "Failed to fetch cover art" }); } }); @@ -2339,7 +2574,7 @@ router.get("/cover-art-colors", imageLimiter, async (req, res) => { imageUrl.includes("placeholder") || imageUrl.startsWith("/placeholder") ) { - console.log( + logger.debug( `[COLORS] Placeholder image detected, returning fallback colors` ); return res.json({ @@ -2362,21 +2597,21 @@ router.get("/cover-art-colors", imageLimiter, async (req, res) => { try { const cached = await redisClient.get(cacheKey); if (cached) { - console.log( + logger.debug( `[COLORS] Cache HIT for ${imageUrl.substring(0, 60)}...` ); return res.json(JSON.parse(cached)); } else { - console.log( + logger.debug( `[COLORS] ✗ Cache MISS for ${imageUrl.substring(0, 60)}...` ); } } catch (cacheError) { - console.warn("[COLORS] Redis cache read error:", cacheError); + logger.warn("[COLORS] Redis cache read error:", cacheError); } // Fetch the image - console.log( + logger.debug( `[COLORS] Fetching image: ${imageUrl.substring(0, 100)}...` ); const imageResponse = await fetch(imageUrl, { @@ -2386,7 +2621,7 @@ router.get("/cover-art-colors", imageLimiter, async (req, res) => { }); if (!imageResponse.ok) { - console.error( + logger.error( `[COLORS] Failed to fetch image: ${imageUrl} (${imageResponse.status})` ); return res.status(404).json({ error: "Image not found" }); @@ -2398,7 +2633,7 @@ 
router.get("/cover-art-colors", imageLimiter, async (req, res) => { // Extract colors using sharp const colors = await extractColorsFromImage(imageBuffer); - console.log(`[COLORS] Extracted colors:`, colors); + logger.debug(`[COLORS] Extracted colors:`, colors); // Cache the result for 30 days try { @@ -2407,14 +2642,14 @@ router.get("/cover-art-colors", imageLimiter, async (req, res) => { 30 * 24 * 60 * 60, // 30 days JSON.stringify(colors) ); - console.log(`[COLORS] Cached colors for 30 days`); + logger.debug(`[COLORS] Cached colors for 30 days`); } catch (cacheError) { - console.warn("[COLORS] Redis cache write error:", cacheError); + logger.warn("[COLORS] Redis cache write error:", cacheError); } res.json(colors); } catch (error) { - console.error("Extract colors error:", error); + logger.error("Extract colors error:", error); res.status(500).json({ error: "Failed to extract colors" }); } }); @@ -2422,12 +2657,12 @@ router.get("/cover-art-colors", imageLimiter, async (req, res) => { // GET /library/tracks/:id/stream router.get("/tracks/:id/stream", async (req, res) => { try { - console.log("[STREAM] Request received for track:", req.params.id); + logger.debug("[STREAM] Request received for track:", req.params.id); const { quality } = req.query; const userId = req.user?.id; if (!userId) { - console.log("[STREAM] No userId in session - unauthorized"); + logger.debug("[STREAM] No userId in session - unauthorized"); return res.status(401).json({ error: "Unauthorized" }); } @@ -2436,7 +2671,7 @@ router.get("/tracks/:id/stream", async (req, res) => { }); if (!track) { - console.log("[STREAM] Track not found"); + logger.debug("[STREAM] Track not found"); return res.status(404).json({ error: "Track not found" }); } @@ -2459,7 +2694,7 @@ router.get("/tracks/:id/stream", async (req, res) => { trackId: track.id, }, }); - console.log("[STREAM] Logged new play for track:", track.title); + logger.debug("[STREAM] Logged new play for track:", track.title); } // Get user's 
quality preference @@ -2476,7 +2711,7 @@ router.get("/tracks/:id/stream", async (req, res) => { const ext = track.filePath ? path.extname(track.filePath).toLowerCase() : ""; - console.log( + logger.debug( `[STREAM] Quality: requested=${ quality || "default" }, using=${requestedQuality}, format=${ext}` @@ -2501,7 +2736,7 @@ router.get("/tracks/:id/stream", async (req, res) => { normalizedFilePath ); - console.log( + logger.debug( `[STREAM] Using native file: ${track.filePath} (${requestedQuality})` ); @@ -2515,7 +2750,7 @@ router.get("/tracks/:id/stream", async (req, res) => { ); // Stream file with range support - console.log( + logger.debug( `[STREAM] Sending file: ${filePath}, mimeType: ${mimeType}` ); @@ -2536,9 +2771,9 @@ router.get("/tracks/:id/stream", async (req, res) => { // Always destroy the streaming service to clean up intervals streamingService.destroy(); if (err) { - console.error(`[STREAM] sendFile error:`, err); + logger.error(`[STREAM] sendFile error:`, err); } else { - console.log( + logger.debug( `[STREAM] File sent successfully: ${path.basename( filePath )}` @@ -2554,7 +2789,7 @@ router.get("/tracks/:id/stream", async (req, res) => { err.code === "FFMPEG_NOT_FOUND" && requestedQuality !== "original" ) { - console.warn( + logger.warn( `[STREAM] FFmpeg not available, falling back to original quality` ); const fallbackFilePath = track.filePath.replace(/\\/g, "/"); @@ -2594,7 +2829,7 @@ router.get("/tracks/:id/stream", async (req, res) => { // Always destroy the streaming service to clean up intervals streamingService.destroy(); if (err) { - console.error( + logger.error( `[STREAM] sendFile fallback error:`, err ); @@ -2604,7 +2839,7 @@ router.get("/tracks/:id/stream", async (req, res) => { return; } - console.error("[STREAM] Native streaming failed:", err.message); + logger.error("[STREAM] Native streaming failed:", err.message); return res .status(500) .json({ error: "Failed to stream track" }); @@ -2612,10 +2847,10 @@ 
router.get("/tracks/:id/stream", async (req, res) => { } // No file path available - console.log("[STREAM] Track has no file path - unavailable"); + logger.debug("[STREAM] Track has no file path - unavailable"); return res.status(404).json({ error: "Track not available" }); } catch (error) { - console.error("Stream track error:", error); + logger.error("Stream track error:", error); res.status(500).json({ error: "Failed to stream track" }); } }); @@ -2661,7 +2896,7 @@ router.get("/tracks/:id", async (req, res) => { res.json(formattedTrack); } catch (error) { - console.error("Get track error:", error); + logger.error("Get track error:", error); res.status(500).json({ error: "Failed to fetch track" }); } }); @@ -2694,10 +2929,10 @@ router.delete("/tracks/:id", async (req, res) => { if (fs.existsSync(absolutePath)) { fs.unlinkSync(absolutePath); - console.log(`[DELETE] Deleted file: ${absolutePath}`); + logger.debug(`[DELETE] Deleted file: ${absolutePath}`); } } catch (err) { - console.warn("[DELETE] Could not delete file:", err); + logger.warn("[DELETE] Could not delete file:", err); // Continue with database deletion even if file deletion fails } } @@ -2707,11 +2942,11 @@ router.delete("/tracks/:id", async (req, res) => { where: { id: track.id }, }); - console.log(`[DELETE] Deleted track: ${track.title}`); + logger.debug(`[DELETE] Deleted track: ${track.title}`); res.json({ message: "Track deleted successfully" }); } catch (error) { - console.error("Delete track error:", error); + logger.error("Delete track error:", error); res.status(500).json({ error: "Failed to delete track" }); } }); @@ -2750,7 +2985,7 @@ router.delete("/albums/:id", async (req, res) => { deletedFiles++; } } catch (err) { - console.warn("[DELETE] Could not delete file:", err); + logger.warn("[DELETE] Could not delete file:", err); } } } @@ -2768,13 +3003,13 @@ router.delete("/albums/:id", async (req, res) => { const files = fs.readdirSync(albumFolder); if (files.length === 0) { 
fs.rmdirSync(albumFolder); - console.log( + logger.debug( `[DELETE] Deleted empty album folder: ${albumFolder}` ); } } } catch (err) { - console.warn("[DELETE] Could not delete album folder:", err); + logger.warn("[DELETE] Could not delete album folder:", err); } // Delete from database (cascade will delete tracks) @@ -2782,7 +3017,7 @@ router.delete("/albums/:id", async (req, res) => { where: { id: album.id }, }); - console.log( + logger.debug( `[DELETE] Deleted album: ${album.title} (${deletedFiles} files)` ); @@ -2791,7 +3026,7 @@ router.delete("/albums/:id", async (req, res) => { deletedFiles, }); } catch (error) { - console.error("Delete album error:", error); + logger.error("Delete album error:", error); res.status(500).json({ error: "Failed to delete album" }); } }); @@ -2859,7 +3094,7 @@ router.delete("/artists/:id", async (req, res) => { } } } catch (err) { - console.warn("[DELETE] Could not delete file:", err); + logger.warn("[DELETE] Could not delete file:", err); } } } @@ -2869,7 +3104,7 @@ router.delete("/artists/:id", async (req, res) => { for (const artistFolder of artistFoldersToDelete) { try { if (fs.existsSync(artistFolder)) { - console.log( + logger.debug( `[DELETE] Attempting to delete folder: ${artistFolder}` ); @@ -2878,12 +3113,12 @@ router.delete("/artists/:id", async (req, res) => { recursive: true, force: true, }); - console.log( + logger.debug( `[DELETE] Successfully deleted artist folder: ${artistFolder}` ); } } catch (err: any) { - console.error( + logger.error( `[DELETE] Failed to delete artist folder ${artistFolder}:`, err?.message || err ); @@ -2903,9 +3138,9 @@ router.delete("/artists/:id", async (req, res) => { } else { fs.unlinkSync(filePath); } - console.log(`[DELETE] Deleted: ${filePath}`); + logger.debug(`[DELETE] Deleted: ${filePath}`); } catch (fileErr: any) { - console.error( + logger.error( `[DELETE] Could not delete ${filePath}:`, fileErr?.message ); @@ -2913,11 +3148,11 @@ router.delete("/artists/:id", async (req, res) => 
{ } // Try deleting the now-empty folder fs.rmdirSync(artistFolder); - console.log( + logger.debug( `[DELETE] Deleted artist folder after manual cleanup: ${artistFolder}` ); } catch (cleanupErr: any) { - console.error( + logger.error( `[DELETE] Cleanup also failed for ${artistFolder}:`, cleanupErr?.message ); @@ -2939,11 +3174,11 @@ router.delete("/artists/:id", async (req, res) => { ) { try { fs.rmSync(commonPath, { recursive: true, force: true }); - console.log( + logger.debug( `[DELETE] Deleted additional artist folder: ${commonPath}` ); } catch (err: any) { - console.error( + logger.error( `[DELETE] Could not delete ${commonPath}:`, err?.message ); @@ -2962,16 +3197,16 @@ router.delete("/artists/:id", async (req, res) => { true ); if (lidarrResult.success) { - console.log(`[DELETE] Lidarr: ${lidarrResult.message}`); + logger.debug(`[DELETE] Lidarr: ${lidarrResult.message}`); lidarrDeleted = true; } else { - console.warn( + logger.warn( `[DELETE] Lidarr deletion note: ${lidarrResult.message}` ); lidarrError = lidarrResult.message; } } catch (err: any) { - console.warn( + logger.warn( "[DELETE] Could not delete from Lidarr:", err?.message || err ); @@ -2985,18 +3220,18 @@ router.delete("/artists/:id", async (req, res) => { where: { artistId: artist.id }, }); } catch (err) { - console.warn("[DELETE] Could not delete OwnedAlbum records:", err); + logger.warn("[DELETE] Could not delete OwnedAlbum records:", err); } // Delete from database (cascade will delete albums and tracks) - console.log( + logger.debug( `[DELETE] Deleting artist from database: ${artist.name} (${artist.id})` ); await prisma.artist.delete({ where: { id: artist.id }, }); - console.log( + logger.debug( `[DELETE] Successfully deleted artist: ${ artist.name } (${deletedFiles} files${ @@ -3011,8 +3246,8 @@ router.delete("/artists/:id", async (req, res) => { lidarrError, }); } catch (error: any) { - console.error("Delete artist error:", error?.message || error); - console.error("Delete artist stack:", 
error?.stack); + logger.error("Delete artist error:", error?.message || error); + logger.error("Delete artist stack:", error?.stack); res.status(500).json({ error: "Failed to delete artist", details: error?.message || "Unknown error", @@ -3038,70 +3273,39 @@ router.get("/genres", async (req, res) => { ) ); - // Get genres from TrackGenre relation (most accurate) - const trackGenres = await prisma.genre.findMany({ - include: { - _count: { - select: { trackGenres: true }, - }, - }, - }); + // Query Artist.genres field (populated by enrichment from Last.fm tags) + // Use raw SQL to expand JSONB array and count tracks per genre + const minTracks = 15; // Minimum tracks for a genre to show up + const genreResults = await prisma.$queryRaw< + { genre: string; track_count: bigint }[] + >` + SELECT LOWER(g.genre) as genre, COUNT(DISTINCT t.id) as track_count + FROM "Artist" ar + CROSS JOIN LATERAL jsonb_array_elements_text(ar.genres::jsonb) AS g(genre) + JOIN "Album" a ON a."artistId" = ar.id + JOIN "Track" t ON t."albumId" = a.id + WHERE ar.genres IS NOT NULL + GROUP BY LOWER(g.genre) + HAVING COUNT(DISTINCT t.id) >= ${minTracks} + ORDER BY track_count DESC + LIMIT 20 + `; - const genreMap = new Map(); + // Filter out artist names and convert bigint to number + const genres = genreResults + .map((row) => ({ + genre: row.genre, + count: Number(row.track_count), + })) + .filter((g) => !artistNames.has(g.genre.toLowerCase())); - // Add track genre counts (excluding artist names) - for (const g of trackGenres) { - if (g.name && g._count.trackGenres > 0) { - const normalized = g.name.trim(); - // Skip if it matches an artist name - if (normalized && !artistNames.has(normalized.toLowerCase())) { - genreMap.set(normalized, g._count.trackGenres); - } - } - } - - // Fallback: Get genres from Album.genres JSON field if no TrackGenres - if (genreMap.size === 0) { - const albums = await prisma.album.findMany({ - where: { - genres: { not: null }, - }, - select: { - genres: true, - 
_count: { select: { tracks: true } }, - }, - }); - - for (const album of albums) { - const albumGenres = album.genres as string[] | null; - if (albumGenres && Array.isArray(albumGenres)) { - for (const genre of albumGenres) { - const normalized = genre.trim(); - // Skip if it matches an artist name - if ( - normalized && - !artistNames.has(normalized.toLowerCase()) - ) { - genreMap.set( - normalized, - (genreMap.get(normalized) || 0) + - album._count.tracks - ); - } - } - } - } - } - - // Convert to array and sort by count - const genres = Array.from(genreMap.entries()) - .map(([genre, count]) => ({ genre, count })) - .sort((a, b) => b.count - a.count) - .slice(0, 20); // Top 20 genres + logger.debug( + `[Genres] Found ${genres.length} genres from Artist.genres (min ${minTracks} tracks)` + ); res.json({ genres }); } catch (error) { - console.error("Genres endpoint error:", error); + logger.error("Genres endpoint error:", error); res.status(500).json({ error: "Failed to get genres" }); } }); @@ -3113,24 +3317,23 @@ router.get("/genres", async (req, res) => { */ router.get("/decades", async (req, res) => { try { - // Get all albums with year and track count + // Get all albums with year fields and track count const albums = await prisma.album.findMany({ - where: { - year: { not: null }, - }, select: { year: true, + originalYear: true, + displayYear: true, _count: { select: { tracks: true } }, }, }); - // Group by decade + // Group by decade using effective year (displayYear > originalYear > year) const decadeMap = new Map(); for (const album of albums) { - if (album.year) { - // Calculate decade start (e.g., 1987 -> 1980, 2023 -> 2020) - const decadeStart = Math.floor(album.year / 10) * 10; + const effectiveYear = getEffectiveYear(album); + if (effectiveYear) { + const decadeStart = getDecadeFromYear(effectiveYear); decadeMap.set( decadeStart, (decadeMap.get(decadeStart) || 0) + album._count.tracks @@ -3146,7 +3349,7 @@ router.get("/decades", async (req, res) => { 
res.json({ decades }); } catch (error) { - console.error("Decades endpoint error:", error); + logger.error("Decades endpoint error:", error); res.status(500).json({ error: "Failed to get decades" }); } }); @@ -3223,7 +3426,7 @@ router.get("/radio", async (req, res) => { trackIds = mostPlayedTracks.map((t) => t.id); } else { // No play data yet - just get random tracks - console.log( + logger.debug( "[Radio:favorites] No play data found, returning random tracks" ); const randomTracks = await prisma.track.findMany({ @@ -3237,16 +3440,10 @@ router.get("/radio", async (req, res) => { case "decade": // Filter by decade (e.g., value = "1990" for 90s) const decadeStart = parseInt(value as string) || 2000; - const decadeEnd = decadeStart + 9; const decadeTracks = await prisma.track.findMany({ where: { - album: { - year: { - gte: decadeStart, - lte: decadeEnd, - }, - }, + album: getDecadeWhereClause(decadeStart), }, select: { id: true }, take: limitNum * 3, @@ -3255,51 +3452,36 @@ router.get("/radio", async (req, res) => { break; case "genre": - // Filter by genre (matches against album or track genre tags) + // Filter by genre (uses Artist.genres and Artist.userGenres) const genreValue = ((value as string) || "").toLowerCase(); - // Strategy 1: Check trackGenres relation (most reliable) - const genreRelationTracks = await prisma.track.findMany({ - where: { - trackGenres: { - some: { - genre: { - name: { - contains: genreValue, - mode: "insensitive", - }, - }, - }, - }, - }, - select: { id: true }, - take: limitNum * 2, - }); - trackIds = genreRelationTracks.map((t) => t.id); + // Query Artist.genres and userGenres fields with raw SQL + // Join Artist → Album → Track and filter by genre using LIKE for partial matching + // Check BOTH canonical genres AND user-added genres (OR condition) + const genreTracks = await prisma.$queryRaw< + { id: string }[] + >` + SELECT DISTINCT t.id + FROM "Artist" ar + JOIN "Album" a ON a."artistId" = ar.id + JOIN "Track" t ON t."albumId" = 
a.id + WHERE ( + (ar.genres IS NOT NULL AND EXISTS ( + SELECT 1 FROM jsonb_array_elements_text(ar.genres::jsonb) AS g(genre) + WHERE LOWER(g.genre) LIKE ${"%" + genreValue + "%"} + )) + OR + (ar."userGenres" IS NOT NULL AND EXISTS ( + SELECT 1 FROM jsonb_array_elements_text(ar."userGenres"::jsonb) AS ug(genre) + WHERE LOWER(ug.genre) LIKE ${"%" + genreValue + "%"} + )) + ) + LIMIT ${limitNum * 2} + `; + trackIds = genreTracks.map((t) => t.id); - // Strategy 2: If not enough, check album.genres JSON field with raw query - if (trackIds.length < limitNum) { - const albumGenreTracks = await prisma.$queryRaw< - { id: string }[] - >` - SELECT t.id - FROM "Track" t - JOIN "Album" a ON t."albumId" = a.id - WHERE a.genres IS NOT NULL - AND EXISTS ( - SELECT 1 FROM jsonb_array_elements_text(a.genres::jsonb) AS g - WHERE LOWER(g) LIKE ${"%" + genreValue + "%"} - ) - LIMIT ${limitNum * 2} - `; - const newIds = albumGenreTracks - .map((t) => t.id) - .filter((id) => !trackIds.includes(id)); - trackIds = [...trackIds, ...newIds]; - } - - console.log( - `[Radio:genre] Found ${trackIds.length} tracks for genre "${genreValue}"` + logger.debug( + `[Radio:genre] Found ${trackIds.length} tracks for genre "${genreValue}" from Artist.genres and userGenres` ); break; @@ -3402,7 +3584,7 @@ router.get("/radio", async (req, res) => { take: limitNum * 2, }); workoutTrackIds = energyTracks.map((t) => t.id); - console.log( + logger.debug( `[Radio:workout] Found ${workoutTrackIds.length} tracks via audio analysis` ); @@ -3453,7 +3635,7 @@ router.get("/radio", async (req, res) => { workoutTrackIds = [ ...new Set([...workoutTrackIds, ...genreTrackIds]), ]; - console.log( + logger.debug( `[Radio:workout] After genre check: ${workoutTrackIds.length} tracks` ); @@ -3476,7 +3658,7 @@ router.get("/radio", async (req, res) => { ...albumGenreTracks.map((t) => t.id), ]), ]; - console.log( + logger.debug( `[Radio:workout] After album genre check: ${workoutTrackIds.length} tracks` ); } @@ -3495,7 +3677,7 @@ 
router.get("/radio", async (req, res) => { .json({ error: "Artist ID required for artist radio" }); } - console.log( + logger.debug( `[Radio:artist] Starting artist radio for: ${artistId}` ); @@ -3510,7 +3692,7 @@ router.get("/radio", async (req, res) => { danceability: true, }, }); - console.log( + logger.debug( `[Radio:artist] Found ${artistTracks.length} tracks from artist` ); @@ -3547,7 +3729,7 @@ router.get("/radio", async (req, res) => { ) / analyzedTracks.length, } : null; - console.log(`[Radio:artist] Artist vibe:`, avgVibe); + logger.debug(`[Radio:artist] Artist vibe:`, avgVibe); // 2. Get library artist IDs (artists user actually owns) const ownedArtists = await prisma.ownedAlbum.findMany({ @@ -3558,7 +3740,7 @@ router.get("/radio", async (req, res) => { ownedArtists.map((o) => o.artistId) ); libraryArtistIds.delete(artistId); // Exclude the current artist - console.log( + logger.debug( `[Radio:artist] Library has ${libraryArtistIds.size} other artists` ); @@ -3574,7 +3756,7 @@ router.get("/radio", async (req, res) => { let similarArtistIds = similarInLibrary.map( (s) => s.toArtistId ); - console.log( + logger.debug( `[Radio:artist] Found ${similarArtistIds.length} Last.fm similar artists in library` ); @@ -3582,9 +3764,9 @@ router.get("/radio", async (req, res) => { if (similarArtistIds.length < 5 && libraryArtistIds.size > 0) { const artist = await prisma.artist.findUnique({ where: { id: artistId }, - select: { genres: true }, + select: { genres: true, userGenres: true }, }); - const artistGenres = (artist?.genres as string[]) || []; + const artistGenres = getMergedGenres(artist || {}); if (artistGenres.length > 0) { // Find library artists with overlapping genres @@ -3592,14 +3774,17 @@ router.get("/radio", async (req, res) => { where: { id: { in: Array.from(libraryArtistIds) }, }, - select: { id: true, genres: true }, + select: { + id: true, + genres: true, + userGenres: true, + }, }); - // Score artists by genre overlap + // Score artists by genre 
overlap using merged genres const scoredArtists = genreMatchArtists .map((a) => { - const theirGenres = - (a.genres as string[]) || []; + const theirGenres = getMergedGenres(a); const overlap = artistGenres.filter((g) => theirGenres.some( (tg) => @@ -3624,7 +3809,7 @@ router.get("/radio", async (req, res) => { ...genreArtistIds, ]), ]; - console.log( + logger.debug( `[Radio:artist] After genre matching: ${similarArtistIds.length} similar artists` ); } @@ -3651,7 +3836,7 @@ router.get("/radio", async (req, res) => { danceability: true, }, }); - console.log( + logger.debug( `[Radio:artist] Found ${similarTracks.length} tracks from similar artists` ); } @@ -3710,7 +3895,7 @@ router.get("/radio", async (req, res) => { (b as any).vibeScore - (a as any).vibeScore ); - console.log( + logger.debug( `[Radio:artist] Applied vibe boost, top score: ${( similarTracks[0] as any )?.vibeScore?.toFixed(2)}` @@ -3739,7 +3924,7 @@ router.get("/radio", async (req, res) => { trackIds = [...selectedOriginal, ...selectedSimilar].map( (t) => t.id ); - console.log( + logger.debug( `[Radio:artist] Final mix: ${selectedOriginal.length} original + ${selectedSimilar.length} similar = ${trackIds.length} tracks` ); break; @@ -3754,7 +3939,7 @@ router.get("/radio", async (req, res) => { .json({ error: "Track ID required for vibe matching" }); } - console.log( + logger.debug( `[Radio:vibe] Starting vibe match for track: ${sourceTrackId}` ); @@ -3782,19 +3967,19 @@ router.get("/radio", async (req, res) => { (sourceTrack.moodHappy !== null && sourceTrack.moodSad !== null); - console.log( + logger.debug( `[Radio:vibe] Source: "${sourceTrack.title}" by ${sourceTrack.album.artist.name}` ); - console.log( + logger.debug( `[Radio:vibe] Analysis mode: ${ isEnhancedAnalysis ? 
"ENHANCED" : "STANDARD" }` ); - console.log( + logger.debug( `[Radio:vibe] Source features: BPM=${sourceTrack.bpm}, Energy=${sourceTrack.energy}, Valence=${sourceTrack.valence}` ); if (isEnhancedAnalysis) { - console.log( + logger.debug( `[Radio:vibe] ML Moods: Happy=${sourceTrack.moodHappy}, Sad=${sourceTrack.moodSad}, Relaxed=${sourceTrack.moodRelaxed}, Aggressive=${sourceTrack.moodAggressive}, Party=${sourceTrack.moodParty}, Acoustic=${sourceTrack.moodAcoustic}, Electronic=${sourceTrack.moodElectronic}` ); } @@ -3860,7 +4045,7 @@ router.get("/radio", async (req, res) => { }, }); - console.log( + logger.debug( `[Radio:vibe] Found ${analyzedTracks.length} analyzed tracks to compare` ); @@ -4129,19 +4314,19 @@ router.get("/radio", async (req, res) => { const enhancedCount = goodMatches.filter( (t) => t.enhanced ).length; - console.log( + logger.debug( `[Radio:vibe] Audio matching found ${ vibeMatchedIds.length } tracks (>${minThreshold * 100}% similarity)` ); - console.log( + logger.debug( `[Radio:vibe] Enhanced matches: ${enhancedCount}, Standard matches: ${ goodMatches.length - enhancedCount }` ); if (goodMatches.length > 0) { - console.log( + logger.debug( `[Radio:vibe] Top match score: ${goodMatches[0].score.toFixed( 2 )} (${ @@ -4165,7 +4350,7 @@ router.get("/radio", async (req, res) => { }); const newIds = artistTracks.map((t) => t.id); vibeMatchedIds = [...vibeMatchedIds, ...newIds]; - console.log( + logger.debug( `[Radio:vibe] Fallback A (same artist): added ${newIds.length} tracks, total: ${vibeMatchedIds.length}` ); } @@ -4213,7 +4398,7 @@ router.get("/radio", async (req, res) => { ); const newIds = similarArtistTracks.map((t) => t.id); vibeMatchedIds = [...vibeMatchedIds, ...newIds]; - console.log( + logger.debug( `[Radio:vibe] Fallback B (similar artists): added ${newIds.length} tracks, total: ${vibeMatchedIds.length}` ); } @@ -4246,7 +4431,7 @@ router.get("/radio", async (req, res) => { }); const newIds = genreTracks.map((t) => t.id); vibeMatchedIds = 
[...vibeMatchedIds, ...newIds]; - console.log( + logger.debug( `[Radio:vibe] Fallback C (same genre): added ${newIds.length} tracks, total: ${vibeMatchedIds.length}` ); } @@ -4262,13 +4447,13 @@ router.get("/radio", async (req, res) => { }); const newIds = randomTracks.map((t) => t.id); vibeMatchedIds = [...vibeMatchedIds, ...newIds]; - console.log( + logger.debug( `[Radio:vibe] Fallback D (random): added ${newIds.length} tracks, total: ${vibeMatchedIds.length}` ); } trackIds = vibeMatchedIds; - console.log( + logger.debug( `[Radio:vibe] Final vibe queue: ${trackIds.length} tracks` ); break; @@ -4330,9 +4515,9 @@ router.get("/radio", async (req, res) => { // === VIBE QUEUE LOGGING === // Log detailed info for vibe matching analysis (using ordered tracks) if (type === "vibe" && vibeSourceFeatures) { - console.log("\n" + "=".repeat(100)); - console.log("VIBE QUEUE ANALYSIS - Source Track"); - console.log("=".repeat(100)); + logger.debug("\n" + "=".repeat(100)); + logger.debug("VIBE QUEUE ANALYSIS - Source Track"); + logger.debug("=".repeat(100)); // Find source track for logging const srcTrack = await prisma.track.findUnique({ @@ -4346,28 +4531,28 @@ router.get("/radio", async (req, res) => { }); if (srcTrack) { - console.log( + logger.debug( `SOURCE: "${srcTrack.title}" by ${srcTrack.album.artist.name}` ); - console.log(` Album: ${srcTrack.album.title}`); - console.log( + logger.debug(` Album: ${srcTrack.album.title}`); + logger.debug( ` Analysis Mode: ${ (srcTrack as any).analysisMode || "unknown" }` ); - console.log( + logger.debug( ` BPM: ${srcTrack.bpm?.toFixed(1) || "N/A"} | Energy: ${ srcTrack.energy?.toFixed(2) || "N/A" } | Valence: ${srcTrack.valence?.toFixed(2) || "N/A"}` ); - console.log( + logger.debug( ` Danceability: ${ srcTrack.danceability?.toFixed(2) || "N/A" } | Arousal: ${ srcTrack.arousal?.toFixed(2) || "N/A" } | Key: ${srcTrack.keyScale || "N/A"}` ); - console.log( + logger.debug( ` ML Moods: Happy=${ (srcTrack as any).moodHappy?.toFixed(2) || 
"N/A" }, Sad=${ @@ -4378,31 +4563,31 @@ router.get("/radio", async (req, res) => { (srcTrack as any).moodAggressive?.toFixed(2) || "N/A" }` ); - console.log( + logger.debug( ` Genres: ${ srcTrack.trackGenres .map((tg) => tg.genre.name) .join(", ") || "N/A" }` ); - console.log( + logger.debug( ` Last.fm Tags: ${ ((srcTrack as any).lastfmTags || []).join(", ") || "N/A" }` ); - console.log( + logger.debug( ` Mood Tags: ${ ((srcTrack as any).moodTags || []).join(", ") || "N/A" }` ); } - console.log("\n" + "-".repeat(100)); - console.log( + logger.debug("\n" + "-".repeat(100)); + logger.debug( `VIBE QUEUE - ${orderedTracks.length} tracks (showing up to 50, SORTED BY MATCH SCORE)` ); - console.log("-".repeat(100)); - console.log( + logger.debug("-".repeat(100)); + logger.debug( `${"#".padEnd(3)} | ${"TRACK".padEnd(35)} | ${"ARTIST".padEnd( 20 )} | ${"BPM".padEnd(6)} | ${"ENG".padEnd(5)} | ${"VAL".padEnd( @@ -4411,7 +4596,7 @@ router.get("/radio", async (req, res) => { 4 )} | ${"A".padEnd(4)} | MODE | GENRES` ); - console.log("-".repeat(100)); + logger.debug("-".repeat(100)); orderedTracks.slice(0, 50).forEach((track, i) => { const t = track as any; @@ -4454,7 +4639,7 @@ router.get("/radio", async (req, res) => { .map((tg) => tg.genre.name) .join(", "); - console.log( + logger.debug( `${String(i + 1).padEnd( 3 )} | ${title} | ${artist} | ${bpm} | ${energy} | ${valence} | ${happy} | ${sad} | ${relaxed} | ${aggressive} | ${mode} | ${genres}` @@ -4462,10 +4647,10 @@ router.get("/radio", async (req, res) => { }); if (orderedTracks.length > 50) { - console.log(`... and ${orderedTracks.length - 50} more tracks`); + logger.debug(`... 
and ${orderedTracks.length - 50} more tracks`); } - console.log("=".repeat(100) + "\n"); + logger.debug("=".repeat(100) + "\n"); } // Transform to match frontend Track interface @@ -4521,7 +4706,7 @@ router.get("/radio", async (req, res) => { res.json(response); } catch (error) { - console.error("Radio endpoint error:", error); + logger.error("Radio endpoint error:", error); res.status(500).json({ error: "Failed to get radio tracks" }); } }); diff --git a/backend/src/routes/listeningState.ts b/backend/src/routes/listeningState.ts index df977c6..aee343e 100644 --- a/backend/src/routes/listeningState.ts +++ b/backend/src/routes/listeningState.ts @@ -1,4 +1,5 @@ import { Router } from "express"; +import { logger } from "../utils/logger"; import { requireAuth } from "../middleware/auth"; import { prisma } from "../utils/db"; import { z } from "zod"; @@ -46,7 +47,7 @@ router.post("/", async (req, res) => { .status(400) .json({ error: "Invalid request", details: error.errors }); } - console.error("Update listening state error:", error); + logger.error("Update listening state error:", error); res.status(500).json({ error: "Failed to update listening state" }); } }); @@ -79,7 +80,7 @@ router.get("/", async (req, res) => { res.json(state); } catch (error) { - console.error("Get listening state error:", error); + logger.error("Get listening state error:", error); res.status(500).json({ error: "Failed to get listening state" }); } }); @@ -98,7 +99,7 @@ router.get("/recent", async (req, res) => { res.json(states); } catch (error) { - console.error("Get recent listening states error:", error); + logger.error("Get recent listening states error:", error); res.status(500).json({ error: "Failed to get recent listening states", }); diff --git a/backend/src/routes/mixes.ts b/backend/src/routes/mixes.ts index c6406d9..017eebc 100644 --- a/backend/src/routes/mixes.ts +++ b/backend/src/routes/mixes.ts @@ -1,5 +1,6 @@ import { Router } from "express"; -import { requireAuthOrToken } from 
"../middleware/auth"; +import { logger } from "../utils/logger"; +import { requireAuthOrToken, requireAdmin } from "../middleware/auth"; import { programmaticPlaylistService } from "../services/programmaticPlaylists"; import { moodBucketService, @@ -93,7 +94,7 @@ router.get("/", async (req, res) => { res.json(mixes); } catch (error) { - console.error("Get mixes error:", error); + logger.error("Get mixes error:", error); res.status(500).json({ error: "Failed to get mixes" }); } }); @@ -252,7 +253,7 @@ router.post("/mood", async (req, res) => { .map((id: string) => tracks.find((t) => t.id === id)) .filter((t: any) => t !== undefined); - console.log( + logger.debug( `[MIXES] Generated mood-on-demand mix with ${mix.trackCount} tracks` ); @@ -261,7 +262,7 @@ router.post("/mood", async (req, res) => { tracks: orderedTracks, }); } catch (error) { - console.error("Generate mood mix error:", error); + logger.error("Generate mood mix error:", error); res.status(500).json({ error: "Failed to generate mood mix" }); } }); @@ -430,11 +431,11 @@ router.post("/mood/save-preferences", async (req, res) => { const cacheKey = `mixes:${userId}`; await redisClient.del(cacheKey); - console.log(`[MIXES] Saved mood mix preferences for user ${userId}`); + logger.debug(`[MIXES] Saved mood mix preferences for user ${userId}`); res.json({ success: true, message: "Mood preferences saved" }); } catch (error) { - console.error("Save mood preferences error:", error); + logger.error("Save mood preferences error:", error); res.status(500).json({ error: "Failed to save mood preferences" }); } }); @@ -462,7 +463,7 @@ router.get("/mood/buckets/presets", async (req, res) => { const presets = await moodBucketService.getMoodPresets(); res.json(presets); } catch (error) { - console.error("Get mood presets error:", error); + logger.error("Get mood presets error:", error); res.status(500).json({ error: "Failed to get mood presets" }); } }); @@ -535,7 +536,7 @@ router.get("/mood/buckets/:mood", async (req, 
res) => { tracks: orderedTracks, }); } catch (error) { - console.error("Get mood bucket mix error:", error); + logger.error("Get mood bucket mix error:", error); res.status(500).json({ error: "Failed to get mood mix" }); } }); @@ -611,7 +612,7 @@ router.post("/mood/buckets/:mood/save", async (req, res) => { .map((id: string) => tracks.find((t) => t.id === id)) .filter((t: any) => t !== undefined); - console.log( + logger.debug( `[MIXES] Saved mood bucket mix for user ${userId}: ${mood} (${savedMix.trackCount} tracks)` ); @@ -623,7 +624,7 @@ router.post("/mood/buckets/:mood/save", async (req, res) => { }, }); } catch (error) { - console.error("Save mood bucket mix error:", error); + logger.error("Save mood bucket mix error:", error); res.status(500).json({ error: "Failed to save mood mix" }); } }); @@ -642,15 +643,14 @@ router.post("/mood/buckets/:mood/save", async (req, res) => { * 200: * description: Backfill completed */ -router.post("/mood/buckets/backfill", async (req, res) => { +router.post("/mood/buckets/backfill", requireAdmin, async (req, res) => { try { const userId = getRequestUserId(req); if (!userId) { return res.status(401).json({ error: "Not authenticated" }); } - // TODO: Add admin check - console.log( + logger.debug( `[MIXES] Starting mood bucket backfill requested by user ${userId}` ); @@ -662,7 +662,7 @@ router.post("/mood/buckets/backfill", async (req, res) => { assigned: result.assigned, }); } catch (error) { - console.error("Backfill mood buckets error:", error); + logger.error("Backfill mood buckets error:", error); res.status(500).json({ error: "Failed to backfill mood buckets" }); } }); @@ -721,7 +721,7 @@ router.post("/refresh", async (req, res) => { res.json({ message: "Mixes refreshed", mixes }); } catch (error) { - console.error("Refresh mixes error:", error); + logger.error("Refresh mixes error:", error); res.status(500).json({ error: "Failed to refresh mixes" }); } }); @@ -849,7 +849,7 @@ router.post("/:id/save", async (req, res) => { 
data: playlistItems, }); - console.log( + logger.debug( `[MIXES] Saved mix ${mixId} as playlist ${playlist.id} (${mix.trackIds.length} tracks)` ); @@ -859,7 +859,7 @@ router.post("/:id/save", async (req, res) => { trackCount: mix.trackIds.length, }); } catch (error) { - console.error("Save mix as playlist error:", error); + logger.error("Save mix as playlist error:", error); res.status(500).json({ error: "Failed to save mix as playlist" }); } }); @@ -982,7 +982,7 @@ router.get("/:id", async (req, res) => { tracks: orderedTracks, }); } catch (error) { - console.error("Get mix error:", error); + logger.error("Get mix error:", error); res.status(500).json({ error: "Failed to get mix" }); } }); diff --git a/backend/src/routes/notifications.ts b/backend/src/routes/notifications.ts index 99b5ec8..931d614 100644 --- a/backend/src/routes/notifications.ts +++ b/backend/src/routes/notifications.ts @@ -1,6 +1,7 @@ -import { Router, Response } from "express"; +import { Router, Request, Response } from "express"; +import { logger } from "../utils/logger"; import { notificationService } from "../services/notificationService"; -import { AuthenticatedRequest, requireAuth } from "../middleware/auth"; +import { requireAuth } from "../middleware/auth"; import { prisma } from "../utils/db"; const router = Router(); @@ -12,9 +13,9 @@ const router = Router(); router.get( "/", requireAuth, - async (req: AuthenticatedRequest, res: Response) => { + async (req: Request, res: Response) => { try { - console.log( + logger.debug( `[Notifications] Fetching notifications for user ${ req.user!.id }` @@ -22,12 +23,12 @@ router.get( const notifications = await notificationService.getForUser( req.user!.id ); - console.log( + logger.debug( `[Notifications] Found ${notifications.length} notifications` ); res.json(notifications); } catch (error: any) { - console.error("Error fetching notifications:", error); + logger.error("Error fetching notifications:", error); res.status(500).json({ error: "Failed to 
fetch notifications" }); } } @@ -40,14 +41,14 @@ router.get( router.get( "/unread-count", requireAuth, - async (req: AuthenticatedRequest, res: Response) => { + async (req: Request, res: Response) => { try { const count = await notificationService.getUnreadCount( req.user!.id ); res.json({ count }); } catch (error: any) { - console.error("Error fetching unread count:", error); + logger.error("Error fetching unread count:", error); res.status(500).json({ error: "Failed to fetch unread count" }); } } @@ -60,12 +61,12 @@ router.get( router.post( "/:id/read", requireAuth, - async (req: AuthenticatedRequest, res: Response) => { + async (req: Request, res: Response) => { try { await notificationService.markAsRead(req.params.id, req.user!.id); res.json({ success: true }); } catch (error: any) { - console.error("Error marking notification as read:", error); + logger.error("Error marking notification as read:", error); res.status(500).json({ error: "Failed to mark notification as read", }); @@ -80,12 +81,12 @@ router.post( router.post( "/read-all", requireAuth, - async (req: AuthenticatedRequest, res: Response) => { + async (req: Request, res: Response) => { try { await notificationService.markAllAsRead(req.user!.id); res.json({ success: true }); } catch (error: any) { - console.error("Error marking all notifications as read:", error); + logger.error("Error marking all notifications as read:", error); res.status(500).json({ error: "Failed to mark all notifications as read", }); @@ -100,12 +101,12 @@ router.post( router.post( "/:id/clear", requireAuth, - async (req: AuthenticatedRequest, res: Response) => { + async (req: Request, res: Response) => { try { await notificationService.clear(req.params.id, req.user!.id); res.json({ success: true }); } catch (error: any) { - console.error("Error clearing notification:", error); + logger.error("Error clearing notification:", error); res.status(500).json({ error: "Failed to clear notification" }); } } @@ -118,12 +119,12 @@ 
router.post( router.post( "/clear-all", requireAuth, - async (req: AuthenticatedRequest, res: Response) => { + async (req: Request, res: Response) => { try { await notificationService.clearAll(req.user!.id); res.json({ success: true }); } catch (error: any) { - console.error("Error clearing all notifications:", error); + logger.error("Error clearing all notifications:", error); res.status(500).json({ error: "Failed to clear all notifications", }); @@ -138,11 +139,12 @@ router.post( /** * GET /notifications/downloads/history * Get completed/failed downloads that haven't been cleared + * Deduplicated by album subject (shows only most recent entry per album) */ router.get( "/downloads/history", requireAuth, - async (req: AuthenticatedRequest, res: Response) => { + async (req: Request, res: Response) => { try { const downloads = await prisma.downloadJob.findMany({ where: { @@ -151,11 +153,23 @@ router.get( cleared: false, }, orderBy: { updatedAt: "desc" }, - take: 50, + take: 100, // Fetch more to account for duplicates }); - res.json(downloads); + + // Deduplicate by subject - keep only the most recent entry per album + const seen = new Set(); + const deduplicated = downloads.filter((download) => { + if (seen.has(download.subject)) { + return false; // Skip duplicate + } + seen.add(download.subject); + return true; // Keep first occurrence (most recent due to ordering) + }); + + // Return top 50 after deduplication + res.json(deduplicated.slice(0, 50)); } catch (error: any) { - console.error("Error fetching download history:", error); + logger.error("Error fetching download history:", error); res.status(500).json({ error: "Failed to fetch download history" }); } } @@ -168,7 +182,7 @@ router.get( router.get( "/downloads/active", requireAuth, - async (req: AuthenticatedRequest, res: Response) => { + async (req: Request, res: Response) => { try { const downloads = await prisma.downloadJob.findMany({ where: { @@ -179,7 +193,7 @@ router.get( }); res.json(downloads); } 
catch (error: any) { - console.error("Error fetching active downloads:", error); + logger.error("Error fetching active downloads:", error); res.status(500).json({ error: "Failed to fetch active downloads" }); } } @@ -192,7 +206,7 @@ router.get( router.post( "/downloads/:id/clear", requireAuth, - async (req: AuthenticatedRequest, res: Response) => { + async (req: Request, res: Response) => { try { await prisma.downloadJob.updateMany({ where: { @@ -203,7 +217,7 @@ router.post( }); res.json({ success: true }); } catch (error: any) { - console.error("Error clearing download:", error); + logger.error("Error clearing download:", error); res.status(500).json({ error: "Failed to clear download" }); } } @@ -216,7 +230,7 @@ router.post( router.post( "/downloads/clear-all", requireAuth, - async (req: AuthenticatedRequest, res: Response) => { + async (req: Request, res: Response) => { try { await prisma.downloadJob.updateMany({ where: { @@ -228,7 +242,7 @@ router.post( }); res.json({ success: true }); } catch (error: any) { - console.error("Error clearing all downloads:", error); + logger.error("Error clearing all downloads:", error); res.status(500).json({ error: "Failed to clear all downloads" }); } } @@ -241,7 +255,7 @@ router.post( router.post( "/downloads/:id/retry", requireAuth, - async (req: AuthenticatedRequest, res: Response) => { + async (req: Request, res: Response) => { try { // Get the failed download const failedJob = await prisma.downloadJob.findFirst({ @@ -478,11 +492,9 @@ router.post( const albumTitle = metadata.albumTitle as string; if (!artistName || !albumTitle) { - return res - .status(400) - .json({ - error: "Cannot retry: missing artist/album info", - }); + return res.status(400).json({ + error: "Cannot retry: missing artist/album info", + }); } // Mark old job as cleared @@ -546,13 +558,13 @@ router.post( }, ]; - console.log( + logger.debug( `[Retry] Trying Soulseek for ${artistName} - ${albumTitle}` ); // Run Soulseek search async soulseekService - 
.searchAndDownloadBatch(tracks, musicPath, 4) + .searchAndDownloadBatch(tracks, musicPath, settings?.soulseekConcurrentDownloads || 4) .then(async (result) => { if (result.successful > 0) { await prisma.downloadJob.update({ @@ -569,7 +581,7 @@ router.post( }, }, }); - console.log( + logger.debug( `[Retry] ✓ Soulseek downloaded ${result.successful} tracks for ${artistName} - ${albumTitle}` ); @@ -585,7 +597,7 @@ router.post( }); } else { // Soulseek failed, try Lidarr if we have an MBID - console.log( + logger.debug( `[Retry] Soulseek failed, trying Lidarr for ${artistName} - ${albumTitle}` ); @@ -631,7 +643,7 @@ router.post( } }) .catch(async (error) => { - console.error(`[Retry] Soulseek error:`, error); + logger.error(`[Retry] Soulseek error:`, error); await prisma.downloadJob.update({ where: { id: newJobRecord.id }, data: { @@ -676,7 +688,7 @@ router.post( artistMbid: failedJob.artistMbid, subject: failedJob.subject, status: "pending", - metadata: metadata || {}, + metadata: (metadata || {}) as any, }, }); @@ -702,7 +714,7 @@ router.post( error: result.error, }); } catch (error: any) { - console.error("Error retrying download:", error); + logger.error("Error retrying download:", error); res.status(500).json({ error: "Failed to retry download" }); } } diff --git a/backend/src/routes/offline.ts b/backend/src/routes/offline.ts index 04e139c..4091d4f 100644 --- a/backend/src/routes/offline.ts +++ b/backend/src/routes/offline.ts @@ -1,4 +1,5 @@ import { Router } from "express"; +import { logger } from "../utils/logger"; import { requireAuth } from "../middleware/auth"; import { prisma } from "../utils/db"; import { z } from "zod"; @@ -19,12 +20,12 @@ router.post("/albums/:id/download", async (req, res) => { const { quality } = downloadAlbumSchema.parse(req.body); // Get user's default quality if not specified - let selectedQuality = quality; - if (!selectedQuality) { + let selectedQuality: "original" | "high" | "medium" | "low" = quality || "medium"; + if (!quality) 
{ const settings = await prisma.userSettings.findUnique({ where: { userId }, }); - selectedQuality = (settings?.playbackQuality as any) || "medium"; + selectedQuality = (settings?.playbackQuality as "original" | "high" | "medium" | "low") || "medium"; } // Get album with tracks @@ -103,7 +104,7 @@ router.post("/albums/:id/download", async (req, res) => { .status(400) .json({ error: "Invalid request", details: error.errors }); } - console.error("Create download job error:", error); + logger.error("Create download job error:", error); res.status(500).json({ error: "Failed to create download job" }); } }); @@ -145,7 +146,7 @@ router.post("/tracks/:id/complete", async (req, res) => { res.json(cachedTrack); } catch (error) { - console.error("Complete track download error:", error); + logger.error("Complete track download error:", error); res.status(500).json({ error: "Failed to complete download" }); } }); @@ -209,7 +210,7 @@ router.get("/albums", async (req, res) => { res.json(albums); } catch (error) { - console.error("Get cached albums error:", error); + logger.error("Get cached albums error:", error); res.status(500).json({ error: "Failed to get cached albums" }); } }); @@ -245,7 +246,7 @@ router.delete("/albums/:id", async (req, res) => { deletedCount: cachedTracks.length, }); } catch (error) { - console.error("Delete cached album error:", error); + logger.error("Delete cached album error:", error); res.status(500).json({ error: "Failed to delete cached album" }); } }); @@ -278,7 +279,7 @@ router.get("/stats", async (req, res) => { trackCount, }); } catch (error) { - console.error("Get cache stats error:", error); + logger.error("Get cache stats error:", error); res.status(500).json({ error: "Failed to get cache stats" }); } }); diff --git a/backend/src/routes/onboarding.ts b/backend/src/routes/onboarding.ts index edca0cd..012b56c 100644 --- a/backend/src/routes/onboarding.ts +++ b/backend/src/routes/onboarding.ts @@ -1,4 +1,5 @@ import { Router } from "express"; 
+import { logger } from "../utils/logger"; import { prisma } from "../utils/db"; import bcrypt from "bcrypt"; import { z } from "zod"; @@ -49,14 +50,14 @@ async function ensureEncryptionKey(): Promise { process.env.SETTINGS_ENCRYPTION_KEY !== "default-encryption-key-change-me" ) { - console.log("[ONBOARDING] Encryption key already exists"); + logger.debug("[ONBOARDING] Encryption key already exists"); return; } // Generate a secure 32-byte encryption key const encryptionKey = crypto.randomBytes(32).toString("base64"); - console.log( + logger.debug( "[ONBOARDING] Generating encryption key for settings security..." ); @@ -69,9 +70,9 @@ async function ensureEncryptionKey(): Promise { // Update the process environment so it's available immediately process.env.SETTINGS_ENCRYPTION_KEY = encryptionKey; - console.log("[ONBOARDING] Encryption key generated and saved to .env"); + logger.debug("[ONBOARDING] Encryption key generated and saved to .env"); } catch (error) { - console.error("[ONBOARDING] ✗ Failed to save encryption key:", error); + logger.error("[ONBOARDING] Failed to save encryption key:", error); throw new Error("Failed to generate encryption key"); } } @@ -82,7 +83,7 @@ async function ensureEncryptionKey(): Promise { */ router.post("/register", async (req, res) => { try { - console.log("[ONBOARDING] Register attempt for user:", req.body?.username); + logger.debug("[ONBOARDING] Register attempt for user:", req.body?.username); const { username, password } = registerSchema.parse(req.body); // Check if any user exists (first user becomes admin) @@ -100,7 +101,7 @@ router.post("/register", async (req, res) => { }); if (existing) { - console.log("[ONBOARDING] Username already taken:", username); + logger.debug("[ONBOARDING] Username already taken:", username); return res.status(400).json({ error: "Username already taken" }); } @@ -131,9 +132,10 @@ router.post("/register", async (req, res) => { id: user.id, username: user.username, role: user.role, + tokenVersion: 
user.tokenVersion, }); - console.log("[ONBOARDING] User created successfully:", user.username); + logger.debug("[ONBOARDING] User created successfully:", user.username); res.json({ token, user: { @@ -145,12 +147,12 @@ router.post("/register", async (req, res) => { }); } catch (err: any) { if (err instanceof z.ZodError) { - console.error("[ONBOARDING] Validation error:", err.errors); + logger.error("[ONBOARDING] Validation error:", err.errors); return res .status(400) .json({ error: "Invalid request", details: err.errors }); } - console.error("Registration error:", err); + logger.error("Registration error:", err); res.status(500).json({ error: "Failed to create account" }); } }); @@ -189,10 +191,10 @@ router.post("/lidarr", requireAuth, async (req, res) => { if (response.status === 200) { connectionTested = true; - console.log("Lidarr connection test successful"); + logger.debug("Lidarr connection test successful"); } } catch (error: any) { - console.warn( + logger.warn( " Lidarr connection test failed (saved anyway):", error.message ); @@ -229,7 +231,7 @@ router.post("/lidarr", requireAuth, async (req, res) => { .status(400) .json({ error: "Invalid request", details: err.errors }); } - console.error("Lidarr config error:", err); + logger.error("Lidarr config error:", err); res.status(500).json({ error: "Failed to save configuration" }); } }); @@ -265,10 +267,10 @@ router.post("/audiobookshelf", requireAuth, async (req, res) => { if (response.status === 200) { connectionTested = true; - console.log("Audiobookshelf connection test successful"); + logger.debug("Audiobookshelf connection test successful"); } } catch (error: any) { - console.warn( + logger.warn( " Audiobookshelf connection test failed (saved anyway):", error.message ); @@ -305,7 +307,7 @@ router.post("/audiobookshelf", requireAuth, async (req, res) => { .status(400) .json({ error: "Invalid request", details: err.errors }); } - console.error("Audiobookshelf config error:", err); + 
logger.error("Audiobookshelf config error:", err); res.status(500).json({ error: "Failed to save configuration" }); } }); @@ -363,7 +365,7 @@ router.post("/soulseek", requireAuth, async (req, res) => { .status(400) .json({ error: "Invalid request", details: err.errors }); } - console.error("Soulseek config error:", err); + logger.error("Soulseek config error:", err); res.status(500).json({ error: "Failed to save configuration" }); } }); @@ -394,7 +396,7 @@ router.post("/enrichment", requireAuth, async (req, res) => { .status(400) .json({ error: "Invalid request", details: err.errors }); } - console.error("Enrichment config error:", err); + logger.error("Enrichment config error:", err); res.status(500).json({ error: "Failed to save configuration" }); } }); @@ -410,10 +412,10 @@ router.post("/complete", requireAuth, async (req, res) => { data: { onboardingComplete: true }, }); - console.log("[ONBOARDING] User completed onboarding:", req.user!.id); + logger.debug("[ONBOARDING] User completed onboarding:", req.user!.id); res.json({ success: true }); } catch (err: any) { - console.error("Onboarding complete error:", err); + logger.error("Onboarding complete error:", err); res.status(500).json({ error: "Failed to complete onboarding" }); } }); @@ -467,7 +469,7 @@ router.get("/status", async (req, res) => { }); } } catch (err: any) { - console.error("Onboarding status error:", err); + logger.error("Onboarding status error:", err); res.status(500).json({ error: "Failed to check status" }); } }); diff --git a/backend/src/routes/playbackState.ts b/backend/src/routes/playbackState.ts index b4245e2..098e17d 100644 --- a/backend/src/routes/playbackState.ts +++ b/backend/src/routes/playbackState.ts @@ -1,5 +1,6 @@ import express from "express"; -import { prisma } from "../utils/db"; +import { logger } from "../utils/logger"; +import { prisma, Prisma } from "../utils/db"; import { requireAuth } from "../middleware/auth"; const router = express.Router(); @@ -19,7 +20,7 @@ 
router.get("/", requireAuth, async (req, res) => { res.json(playbackState); } catch (error) { - console.error("Get playback state error:", error); + logger.error("Get playback state error:", error); res.status(500).json({ error: "Failed to get playback state" }); } }); @@ -46,7 +47,7 @@ router.post("/", requireAuth, async (req, res) => { // Validate playback type const validPlaybackTypes = ["track", "audiobook", "podcast"]; if (!validPlaybackTypes.includes(playbackType)) { - console.warn(`[PlaybackState] Invalid playbackType: ${playbackType}`); + logger.warn(`[PlaybackState] Invalid playbackType: ${playbackType}`); return res.status(400).json({ error: "Invalid playbackType" }); } @@ -79,7 +80,7 @@ router.post("/", requireAuth, async (req, res) => { safeQueue = null; } } catch (sanitizeError: any) { - console.error("[PlaybackState] Queue sanitization failed:", sanitizeError?.message); + logger.error("[PlaybackState] Queue sanitization failed:", sanitizeError?.message); safeQueue = null; // Fall back to null queue } } @@ -96,7 +97,7 @@ router.post("/", requireAuth, async (req, res) => { trackId: trackId || null, audiobookId: audiobookId || null, podcastId: podcastId || null, - queue: safeQueue, + queue: safeQueue === null ? Prisma.DbNull : safeQueue, currentIndex: safeCurrentIndex, isShuffle: isShuffle || false, }, @@ -106,7 +107,7 @@ router.post("/", requireAuth, async (req, res) => { trackId: trackId || null, audiobookId: audiobookId || null, podcastId: podcastId || null, - queue: safeQueue, + queue: safeQueue === null ? 
Prisma.DbNull : safeQueue, currentIndex: safeCurrentIndex, isShuffle: isShuffle || false, }, @@ -114,13 +115,13 @@ router.post("/", requireAuth, async (req, res) => { res.json(playbackState); } catch (error: any) { - console.error("[PlaybackState] Error saving state:", error?.message || error); - console.error("[PlaybackState] Full error:", JSON.stringify(error, Object.getOwnPropertyNames(error), 2)); + logger.error("[PlaybackState] Error saving state:", error?.message || error); + logger.error("[PlaybackState] Full error:", JSON.stringify(error, Object.getOwnPropertyNames(error), 2)); if (error?.code) { - console.error("[PlaybackState] Error code:", error.code); + logger.error("[PlaybackState] Error code:", error.code); } if (error?.meta) { - console.error("[PlaybackState] Prisma meta:", error.meta); + logger.error("[PlaybackState] Prisma meta:", error.meta); } // Return more specific error for debugging res.status(500).json({ @@ -141,7 +142,7 @@ router.delete("/", requireAuth, async (req, res) => { res.json({ success: true }); } catch (error) { - console.error("Delete playback state error:", error); + logger.error("Delete playback state error:", error); res.status(500).json({ error: "Failed to delete playback state" }); } }); diff --git a/backend/src/routes/playlists.ts b/backend/src/routes/playlists.ts index 791e683..e171fa3 100644 --- a/backend/src/routes/playlists.ts +++ b/backend/src/routes/playlists.ts @@ -1,7 +1,8 @@ import { Router } from "express"; +import { logger } from "../utils/logger"; +import { z } from "zod"; import { requireAuthOrToken } from "../middleware/auth"; import { prisma } from "../utils/db"; -import { z } from "zod"; import { sessionLog } from "../utils/playlistLogger"; const router = Router(); @@ -20,6 +21,9 @@ const addTrackSchema = z.object({ // GET /playlists router.get("/", async (req, res) => { try { + if (!req.user) { + return res.status(401).json({ error: "Unauthorized" }); + } const userId = req.user.id; // Get user's hidden 
playlists @@ -74,11 +78,11 @@ router.get("/", async (req, res) => { // Debug: log shared playlists with user info const sharedPlaylists = playlistsWithCounts.filter((p) => !p.isOwner); if (sharedPlaylists.length > 0) { - console.log( + logger.debug( `[Playlists] Found ${sharedPlaylists.length} shared playlists for user ${userId}:` ); sharedPlaylists.forEach((p) => { - console.log( + logger.debug( ` - "${p.name}" by ${ p.user?.username || "UNKNOWN" } (owner: ${p.userId})` @@ -88,7 +92,7 @@ router.get("/", async (req, res) => { res.json(playlistsWithCounts); } catch (error) { - console.error("Get playlists error:", error); + logger.error("Get playlists error:", error); res.status(500).json({ error: "Failed to get playlists" }); } }); @@ -96,6 +100,9 @@ router.get("/", async (req, res) => { // POST /playlists router.post("/", async (req, res) => { try { + if (!req.user) { + return res.status(401).json({ error: "Unauthorized" }); + } const userId = req.user.id; const data = createPlaylistSchema.parse(req.body); @@ -114,7 +121,7 @@ router.post("/", async (req, res) => { .status(400) .json({ error: "Invalid request", details: error.errors }); } - console.error("Create playlist error:", error); + logger.error("Create playlist error:", error); res.status(500).json({ error: "Failed to create playlist" }); } }); @@ -122,6 +129,9 @@ router.post("/", async (req, res) => { // GET /playlists/:id router.get("/:id", async (req, res) => { try { + if (!req.user) { + return res.status(401).json({ error: "Unauthorized" }); + } const userId = req.user.id; const playlist = await prisma.playlist.findUnique({ @@ -132,6 +142,10 @@ router.get("/:id", async (req, res) => { username: true, }, }, + hiddenByUsers: { + where: { userId }, + select: { id: true }, + }, items: { include: { track: { @@ -203,6 +217,7 @@ router.get("/:id", async (req, res) => { res.json({ ...playlist, isOwner: playlist.userId === userId, + isHidden: playlist.hiddenByUsers.length > 0, trackCount: playlist.items.length, 
pendingCount: playlist.pendingTracks.length, items: formattedItems, @@ -210,7 +225,7 @@ router.get("/:id", async (req, res) => { mergedItems, }); } catch (error) { - console.error("Get playlist error:", error); + logger.error("Get playlist error:", error); res.status(500).json({ error: "Failed to get playlist" }); } }); @@ -218,6 +233,9 @@ router.get("/:id", async (req, res) => { // PUT /playlists/:id router.put("/:id", async (req, res) => { try { + if (!req.user) { + return res.status(401).json({ error: "Unauthorized" }); + } const userId = req.user.id; const data = createPlaylistSchema.parse(req.body); @@ -249,7 +267,7 @@ router.put("/:id", async (req, res) => { .status(400) .json({ error: "Invalid request", details: error.errors }); } - console.error("Update playlist error:", error); + logger.error("Update playlist error:", error); res.status(500).json({ error: "Failed to update playlist" }); } }); @@ -257,6 +275,9 @@ router.put("/:id", async (req, res) => { // POST /playlists/:id/hide - Hide any playlist from your view router.post("/:id/hide", async (req, res) => { try { + if (!req.user) { + return res.status(401).json({ error: "Unauthorized" }); + } const userId = req.user.id; const playlistId = req.params.id; @@ -285,7 +306,7 @@ router.post("/:id/hide", async (req, res) => { res.json({ message: "Playlist hidden", isHidden: true }); } catch (error) { - console.error("Hide playlist error:", error); + logger.error("Hide playlist error:", error); res.status(500).json({ error: "Failed to hide playlist" }); } }); @@ -293,6 +314,9 @@ router.post("/:id/hide", async (req, res) => { // DELETE /playlists/:id/hide - Unhide a shared playlist router.delete("/:id/hide", async (req, res) => { try { + if (!req.user) { + return res.status(401).json({ error: "Unauthorized" }); + } const userId = req.user.id; const playlistId = req.params.id; @@ -303,7 +327,7 @@ router.delete("/:id/hide", async (req, res) => { res.json({ message: "Playlist unhidden", isHidden: false }); } catch 
(error) { - console.error("Unhide playlist error:", error); + logger.error("Unhide playlist error:", error); res.status(500).json({ error: "Failed to unhide playlist" }); } }); @@ -311,6 +335,9 @@ router.delete("/:id/hide", async (req, res) => { // DELETE /playlists/:id router.delete("/:id", async (req, res) => { try { + if (!req.user) { + return res.status(401).json({ error: "Unauthorized" }); + } const userId = req.user.id; // Check ownership @@ -332,7 +359,7 @@ router.delete("/:id", async (req, res) => { res.json({ message: "Playlist deleted" }); } catch (error) { - console.error("Delete playlist error:", error); + logger.error("Delete playlist error:", error); res.status(500).json({ error: "Failed to delete playlist" }); } }); @@ -340,6 +367,7 @@ router.delete("/:id", async (req, res) => { // POST /playlists/:id/items router.post("/:id/items", async (req, res) => { try { + if (!req.user) return res.status(401).json({ error: "Unauthorized" }); const userId = req.user.id; const parsedBody = addTrackSchema.safeParse(req.body); if (!parsedBody.success) { @@ -425,7 +453,7 @@ router.post("/:id/items", async (req, res) => { .status(400) .json({ error: "Invalid request", details: error.errors }); } - console.error("Add track to playlist error:", error); + logger.error("Add track to playlist error:", error); res.status(500).json({ error: "Failed to add track to playlist" }); } }); @@ -433,7 +461,7 @@ router.post("/:id/items", async (req, res) => { // DELETE /playlists/:id/items/:trackId router.delete("/:id/items/:trackId", async (req, res) => { try { - const userId = req.user.id; + const userId = req.user!.id; // Check ownership const playlist = await prisma.playlist.findUnique({ @@ -459,7 +487,7 @@ router.delete("/:id/items/:trackId", async (req, res) => { res.json({ message: "Track removed from playlist" }); } catch (error) { - console.error("Remove track from playlist error:", error); + logger.error("Remove track from playlist error:", error); res.status(500).json({ 
error: "Failed to remove track from playlist" }); } }); @@ -467,7 +495,7 @@ router.delete("/:id/items/:trackId", async (req, res) => { // PUT /playlists/:id/items/reorder router.put("/:id/items/reorder", async (req, res) => { try { - const userId = req.user.id; + const userId = req.user!.id; const { trackIds } = req.body; // Array of track IDs in new order if (!Array.isArray(trackIds)) { @@ -504,7 +532,7 @@ router.put("/:id/items/reorder", async (req, res) => { res.json({ message: "Playlist reordered" }); } catch (error) { - console.error("Reorder playlist error:", error); + logger.error("Reorder playlist error:", error); res.status(500).json({ error: "Failed to reorder playlist" }); } }); @@ -519,7 +547,7 @@ router.put("/:id/items/reorder", async (req, res) => { */ router.get("/:id/pending", async (req, res) => { try { - const userId = req.user.id; + const userId = req.user!.id; const playlistId = req.params.id; // Check ownership or public access @@ -553,7 +581,7 @@ router.get("/:id/pending", async (req, res) => { spotifyPlaylistId: playlist.spotifyPlaylistId, }); } catch (error) { - console.error("Get pending tracks error:", error); + logger.error("Get pending tracks error:", error); res.status(500).json({ error: "Failed to get pending tracks" }); } }); @@ -564,7 +592,7 @@ router.get("/:id/pending", async (req, res) => { */ router.delete("/:id/pending/:trackId", async (req, res) => { try { - const userId = req.user.id; + const userId = req.user!.id; const { id: playlistId, trackId: pendingTrackId } = req.params; // Check ownership @@ -589,7 +617,7 @@ router.delete("/:id/pending/:trackId", async (req, res) => { if (error.code === "P2025") { return res.status(404).json({ error: "Pending track not found" }); } - console.error("Delete pending track error:", error); + logger.error("Delete pending track error:", error); res.status(500).json({ error: "Failed to delete pending track" }); } }); @@ -632,7 +660,7 @@ router.get("/:id/pending/:trackId/preview", async (req, 
res) => { res.json({ previewUrl }); } catch (error: any) { - console.error("Get preview URL error:", error); + logger.error("Get preview URL error:", error); res.status(500).json({ error: "Failed to get preview URL" }); } }); @@ -644,7 +672,7 @@ router.get("/:id/pending/:trackId/preview", async (req, res) => { */ router.post("/:id/pending/:trackId/retry", async (req, res) => { try { - const userId = req.user.id; + const userId = req.user!.id; const { id: playlistId, trackId: pendingTrackId } = req.params; sessionLog( @@ -771,7 +799,7 @@ router.post("/:id/pending/:trackId/retry", async (req, res) => { ? pendingTrack.spotifyAlbum : pendingTrack.spotifyArtist; // Use artist as fallback folder name - console.log( + logger.debug( `[Retry] Starting download for: ${pendingTrack.spotifyArtist} - ${pendingTrack.spotifyTitle}` ); sessionLog( @@ -787,7 +815,7 @@ router.post("/:id/pending/:trackId/retry", async (req, res) => { ); if (!searchResult.found || searchResult.allMatches.length === 0) { - console.log(`[Retry] ✗ No results found on Soulseek`); + logger.debug(`[Retry] No results found on Soulseek`); sessionLog("PENDING-RETRY", `No results found on Soulseek`, "INFO"); await prisma.downloadJob.update({ @@ -806,7 +834,7 @@ router.post("/:id/pending/:trackId/retry", async (req, res) => { }); } - console.log( + logger.debug( `[Retry] ✓ Found ${searchResult.allMatches.length} results, starting download in background` ); sessionLog( @@ -833,7 +861,7 @@ router.post("/:id/pending/:trackId/retry", async (req, res) => { ) .then(async (result) => { if (result.success) { - console.log( + logger.debug( `[Retry] ✓ Download complete: ${result.filePath}` ); sessionLog( @@ -870,7 +898,7 @@ router.post("/:id/pending/:trackId/retry", async (req, res) => { removeOnComplete: true, } ); - console.log( + logger.debug( `[Retry] Queued library scan to reconcile pending tracks` ); sessionLog( @@ -880,7 +908,7 @@ router.post("/:id/pending/:trackId/retry", async (req, res) => { })` ); } catch 
(scanError) { - console.error( + logger.error( `[Retry] Failed to queue scan:`, scanError ); @@ -893,7 +921,7 @@ router.post("/:id/pending/:trackId/retry", async (req, res) => { ); } } else { - console.log(`[Retry] ✗ Download failed: ${result.error}`); + logger.debug(`[Retry] Download failed: ${result.error}`); sessionLog( "PENDING-RETRY", `Download failed: ${result.error || "unknown error"}`, @@ -911,7 +939,7 @@ router.post("/:id/pending/:trackId/retry", async (req, res) => { } }) .catch((error) => { - console.error(`[Retry] Download error:`, error); + logger.error(`[Retry] Download error:`, error); sessionLog( "PENDING-RETRY", `Download exception: ${error?.message || error}`, @@ -930,7 +958,7 @@ router.post("/:id/pending/:trackId/retry", async (req, res) => { .catch(() => undefined); }); } catch (error: any) { - console.error("Retry pending track error:", error); + logger.error("Retry pending track error:", error); sessionLog( "PENDING-RETRY", `Handler error: ${error?.message || error}`, @@ -949,7 +977,7 @@ router.post("/:id/pending/:trackId/retry", async (req, res) => { */ router.post("/:id/pending/reconcile", async (req, res) => { try { - const userId = req.user.id; + const userId = req.user!.id; const playlistId = req.params.id; // Check ownership @@ -977,7 +1005,7 @@ router.post("/:id/pending/reconcile", async (req, res) => { playlistsUpdated: result.playlistsUpdated, }); } catch (error) { - console.error("Reconcile pending tracks error:", error); + logger.error("Reconcile pending tracks error:", error); res.status(500).json({ error: "Failed to reconcile pending tracks" }); } }); diff --git a/backend/src/routes/plays.ts b/backend/src/routes/plays.ts index 5e1d3e4..e436a1f 100644 --- a/backend/src/routes/plays.ts +++ b/backend/src/routes/plays.ts @@ -1,4 +1,5 @@ import { Router } from "express"; +import { logger } from "../utils/logger"; import { requireAuth } from "../middleware/auth"; import { prisma } from "../utils/db"; import { z } from "zod"; @@ -40,7 
+41,7 @@ router.post("/", async (req, res) => { .status(400) .json({ error: "Invalid request", details: error.errors }); } - console.error("Create play error:", error); + logger.error("Create play error:", error); res.status(500).json({ error: "Failed to log play" }); } }); @@ -76,7 +77,7 @@ router.get("/", async (req, res) => { res.json(plays); } catch (error) { - console.error("Get plays error:", error); + logger.error("Get plays error:", error); res.status(500).json({ error: "Failed to get plays" }); } }); diff --git a/backend/src/routes/podcasts.ts b/backend/src/routes/podcasts.ts index 47b2899..1f4d742 100644 --- a/backend/src/routes/podcasts.ts +++ b/backend/src/routes/podcasts.ts @@ -1,4 +1,5 @@ import { Router } from "express"; +import { logger } from "../utils/logger"; import { requireAuth, requireAuthOrToken } from "../middleware/auth"; import { prisma } from "../utils/db"; import { rssParserService } from "../services/rss-parser"; @@ -16,7 +17,7 @@ const router = Router(); router.post("/sync-covers", requireAuth, async (req, res) => { try { const { notificationService } = await import("../services/notificationService"); - console.log(" Starting podcast cover sync..."); + logger.debug(" Starting podcast cover sync..."); const podcastResult = await podcastCacheService.syncAllCovers(); const episodeResult = await podcastCacheService.syncEpisodeCovers(); @@ -25,7 +26,7 @@ router.post("/sync-covers", requireAuth, async (req, res) => { await notificationService.notifySystem( req.user!.id, "Podcast Covers Synced", - `Synced ${podcastResult.cached || 0} podcast covers and ${episodeResult.cached || 0} episode covers` + `Synced ${podcastResult.synced || 0} podcast covers and ${episodeResult.synced || 0} episode covers` ); res.json({ @@ -34,7 +35,7 @@ router.post("/sync-covers", requireAuth, async (req, res) => { episodes: episodeResult, }); } catch (error: any) { - console.error("Podcast cover sync failed:", error); + logger.error("Podcast cover sync failed:", 
error); res.status(500).json({ error: "Sync failed", message: error.message, @@ -110,7 +111,7 @@ router.get("/", async (req, res) => { res.json(podcasts); } catch (error: any) { - console.error("Error fetching podcasts:", error); + logger.error("Error fetching podcasts:", error); res.status(500).json({ error: "Failed to fetch podcasts", message: error.message, @@ -127,7 +128,7 @@ router.get("/discover/top", requireAuthOrToken, async (req, res) => { const { limit = "20" } = req.query; const podcastLimit = Math.min(parseInt(limit as string, 10), 50); - console.log(`\n[TOP PODCASTS] Request (limit: ${podcastLimit})`); + logger.debug(`\n[TOP PODCASTS] Request (limit: ${podcastLimit})`); // Simple iTunes search - same as the working search bar! const itunesResponse = await axios.get( @@ -155,10 +156,10 @@ router.get("/discover/top", requireAuthOrToken, async (req, res) => { isExternal: true, })); - console.log(` Found ${podcasts.length} podcasts`); + logger.debug(` Found ${podcasts.length} podcasts`); res.json(podcasts); } catch (error: any) { - console.error("Error fetching top podcasts:", error); + logger.error("Error fetching top podcasts:", error); res.status(500).json({ error: "Failed to fetch top podcasts", message: error.message, @@ -174,7 +175,7 @@ router.get("/discover/genres", async (req, res) => { try { const { genres } = req.query; // Comma-separated genre IDs - console.log(`\n[GENRE PODCASTS] Request (genres: ${genres})`); + logger.debug(`\n[GENRE PODCASTS] Request (genres: ${genres})`); if (!genres || typeof genres !== "string") { return res.status(400).json({ @@ -198,7 +199,7 @@ router.get("/discover/genres", async (req, res) => { // Fetch podcasts for each genre using simple iTunes search - PARALLEL execution const genreFetchPromises = genreIds.map(async (genreId) => { const searchTerm = genreSearchTerms[genreId] || "podcast"; - console.log(` Searching for "${searchTerm}"...`); + logger.debug(` Searching for "${searchTerm}"...`); try { // Simple iTunes 
search - same as the working search bar! @@ -230,12 +231,12 @@ router.get("/discover/genres", async (req, res) => { }) ); - console.log( + logger.debug( ` Found ${podcasts.length} podcasts for genre ${genreId}` ); return { genreId, podcasts }; } catch (error: any) { - console.error( + logger.error( ` Error searching for ${searchTerm}:`, error.message ); @@ -252,12 +253,12 @@ router.get("/discover/genres", async (req, res) => { results[genreId] = podcasts; } - console.log( + logger.debug( ` Fetched podcasts for ${genreIds.length} genres (parallel)` ); res.json(results); } catch (error: any) { - console.error("Error fetching genre podcasts:", error); + logger.error("Error fetching genre podcasts:", error); res.status(500).json({ error: "Failed to fetch genre podcasts", message: error.message, @@ -277,7 +278,7 @@ router.get("/discover/genre/:genreId", async (req, res) => { const podcastLimit = Math.min(parseInt(limit as string, 10), 50); const podcastOffset = parseInt(offset as string, 10); - console.log( + logger.debug( `\n[GENRE PAGINATED] Request (genre: ${genreId}, limit: ${podcastLimit}, offset: ${podcastOffset})` ); @@ -293,7 +294,7 @@ router.get("/discover/genre/:genreId", async (req, res) => { }; const searchTerm = genreSearchTerms[genreId] || "podcast"; - console.log( + logger.debug( ` Searching for "${searchTerm}" (offset: ${podcastOffset})...` ); @@ -332,12 +333,12 @@ router.get("/discover/genre/:genreId", async (req, res) => { podcastOffset + podcastLimit ); - console.log( + logger.debug( ` Found ${podcasts.length} podcasts (total available: ${allPodcasts.length})` ); res.json(podcasts); } catch (error: any) { - console.error("Error fetching paginated genre podcasts:", error); + logger.error("Error fetching paginated genre podcasts:", error); res.status(500).json({ error: "Failed to fetch podcasts", message: error.message, @@ -354,7 +355,7 @@ router.get("/preview/:itunesId", async (req, res) => { try { const { itunesId } = req.params; - console.log(`\n 
[PODCAST PREVIEW] iTunes ID: ${itunesId}`); + logger.debug(`\n [PODCAST PREVIEW] iTunes ID: ${itunesId}`); // Try to fetch from iTunes API const itunesResponse = await axios.get( @@ -406,7 +407,7 @@ router.get("/preview/:itunesId", async (req, res) => { podcastData.feedUrl ); description = - feedData.description || feedData.itunes?.summary || ""; + feedData.podcast.description || ""; // Get first 3 episodes for preview previewEpisodes = (feedData.episodes || []) @@ -417,11 +418,11 @@ router.get("/preview/:itunesId", async (req, res) => { duration: episode.duration || 0, })); - console.log( + logger.debug( ` [PODCAST PREVIEW] Fetched description (${description.length} chars) and ${previewEpisodes.length} preview episodes` ); } catch (error) { - console.warn(` Failed to fetch RSS feed for preview:`, error); + logger.warn(` Failed to fetch RSS feed for preview:`, error); // Continue without description and episodes } } @@ -440,7 +441,7 @@ router.get("/preview/:itunesId", async (req, res) => { subscribedPodcastId: isSubscribed ? 
existingPodcast!.id : null, }); } catch (error: any) { - console.error("Error previewing podcast:", error); + logger.error("Error previewing podcast:", error); res.status(500).json({ error: "Failed to preview podcast", message: error.message, @@ -532,7 +533,7 @@ router.get("/:id", async (req, res) => { isSubscribed: true, }); } catch (error: any) { - console.error("Error fetching podcast:", error); + logger.error("Error fetching podcast:", error); res.status(500).json({ error: "Failed to fetch podcast", message: error.message, @@ -554,17 +555,17 @@ router.post("/subscribe", async (req, res) => { .json({ error: "feedUrl or itunesId is required" }); } - console.log( + logger.debug( `\n [PODCAST] Subscribe request from ${req.user!.username}` ); - console.log(` Feed URL: ${feedUrl || "N/A"}`); - console.log(` iTunes ID: ${itunesId || "N/A"}`); + logger.debug(` Feed URL: ${feedUrl || "N/A"}`); + logger.debug(` iTunes ID: ${itunesId || "N/A"}`); let finalFeedUrl = feedUrl; // If only iTunes ID provided, fetch feed URL from iTunes API if (!finalFeedUrl && itunesId) { - console.log(` Looking up feed URL from iTunes...`); + logger.debug(` Looking up feed URL from iTunes...`); const itunesResponse = await axios.get( "https://itunes.apple.com/lookup", { @@ -582,7 +583,7 @@ router.post("/subscribe", async (req, res) => { } finalFeedUrl = itunesResponse.data.results[0].feedUrl; - console.log(` Found feed URL: ${finalFeedUrl}`); + logger.debug(` Found feed URL: ${finalFeedUrl}`); } // Check if podcast already exists in database @@ -591,7 +592,7 @@ router.post("/subscribe", async (req, res) => { }); if (podcast) { - console.log(` Podcast exists in database: ${podcast.title}`); + logger.debug(` Podcast exists in database: ${podcast.title}`); // Check if user is already subscribed const existingSubscription = @@ -605,7 +606,7 @@ router.post("/subscribe", async (req, res) => { }); if (existingSubscription) { - console.log(` User already subscribed`); + logger.debug(` User already 
subscribed`); return res.json({ success: true, podcast: { @@ -624,7 +625,7 @@ router.post("/subscribe", async (req, res) => { }, }); - console.log(` User subscribed to existing podcast`); + logger.debug(` User subscribed to existing podcast`); return res.json({ success: true, podcast: { @@ -636,14 +637,14 @@ router.post("/subscribe", async (req, res) => { } // Parse RSS feed to get podcast and episodes - console.log(` Parsing RSS feed...`); + logger.debug(` Parsing RSS feed...`); const { podcast: podcastData, episodes } = await rssParserService.parseFeed(finalFeedUrl); // Create podcast in database - console.log(` Saving podcast to database...`); + logger.debug(` Saving podcast to database...`); const finalItunesId = itunesId || podcastData.itunesId; - console.log(` iTunes ID to save: ${finalItunesId || "NONE"}`); + logger.debug(` iTunes ID to save: ${finalItunesId || "NONE"}`); podcast = await prisma.podcast.create({ data: { @@ -659,11 +660,11 @@ router.post("/subscribe", async (req, res) => { }, }); - console.log(` Podcast created: ${podcast.id}`); - console.log(` iTunes ID saved: ${podcast.itunesId || "NONE"}`); + logger.debug(` Podcast created: ${podcast.id}`); + logger.debug(` iTunes ID saved: ${podcast.itunesId || "NONE"}`); // Save episodes - console.log(` Saving ${episodes.length} episodes...`); + logger.debug(` Saving ${episodes.length} episodes...`); await prisma.podcastEpisode.createMany({ data: episodes.map((ep) => ({ podcastId: podcast!.id, @@ -682,7 +683,7 @@ router.post("/subscribe", async (req, res) => { skipDuplicates: true, }); - console.log(` Episodes saved`); + logger.debug(` Episodes saved`); // Subscribe user await prisma.podcastSubscription.create({ @@ -692,7 +693,7 @@ router.post("/subscribe", async (req, res) => { }, }); - console.log(` User subscribed successfully`); + logger.debug(` User subscribed successfully`); res.json({ success: true, @@ -703,7 +704,7 @@ router.post("/subscribe", async (req, res) => { message: "Subscribed 
successfully", }); } catch (error: any) { - console.error("Error subscribing to podcast:", error); + logger.error("Error subscribing to podcast:", error); res.status(500).json({ error: "Failed to subscribe to podcast", message: error.message, @@ -719,9 +720,9 @@ router.delete("/:id/unsubscribe", async (req, res) => { try { const { id } = req.params; - console.log(`\n[PODCAST] Unsubscribe request`); - console.log(` User: ${req.user!.username}`); - console.log(` Podcast ID: ${id}`); + logger.debug(`\n[PODCAST] Unsubscribe request`); + logger.debug(` User: ${req.user!.username}`); + logger.debug(` Podcast ID: ${id}`); // Delete subscription const deleted = await prisma.podcastSubscription.deleteMany({ @@ -757,14 +758,14 @@ router.delete("/:id/unsubscribe", async (req, res) => { }, }); - console.log(` Unsubscribed successfully`); + logger.debug(` Unsubscribed successfully`); res.json({ success: true, message: "Unsubscribed successfully", }); } catch (error: any) { - console.error("Error unsubscribing from podcast:", error); + logger.error("Error unsubscribing from podcast:", error); res.status(500).json({ error: "Failed to unsubscribe", message: error.message, @@ -780,8 +781,8 @@ router.get("/:id/refresh", async (req, res) => { try { const { id } = req.params; - console.log(`\n [PODCAST] Refresh request`); - console.log(` Podcast ID: ${id}`); + logger.debug(`\n [PODCAST] Refresh request`); + logger.debug(` Podcast ID: ${id}`); const podcast = await prisma.podcast.findUnique({ where: { id }, @@ -792,7 +793,7 @@ router.get("/:id/refresh", async (req, res) => { } // Parse RSS feed - console.log(` Parsing RSS feed...`); + logger.debug(` Parsing RSS feed...`); const { podcast: podcastData, episodes } = await rssParserService.parseFeed(podcast.feedUrl); @@ -844,7 +845,7 @@ router.get("/:id/refresh", async (req, res) => { } } - console.log( + logger.debug( ` Refresh complete. 
${newEpisodesCount} new episodes added.` ); @@ -855,7 +856,7 @@ router.get("/:id/refresh", async (req, res) => { message: `Found ${newEpisodesCount} new episodes`, }); } catch (error: any) { - console.error("Error refreshing podcast:", error); + logger.error("Error refreshing podcast:", error); res.status(500).json({ error: "Failed to refresh podcast", message: error.message, @@ -888,7 +889,7 @@ router.get("/:podcastId/episodes/:episodeId/cache-status", async (req, res) => { path: cachedPath ? true : false, // Don't expose actual path }); } catch (error: any) { - console.error("[PODCAST] Cache status check failed:", error); + logger.error("[PODCAST] Cache status check failed:", error); res.status(500).json({ error: "Failed to check cache status" }); } }); @@ -904,12 +905,12 @@ router.get("/:podcastId/episodes/:episodeId/stream", async (req, res) => { const userId = req.user?.id; const podcastDebug = process.env.PODCAST_DEBUG === "1"; - console.log(`\n [PODCAST STREAM] Request:`); - console.log(` Podcast ID: ${podcastId}`); - console.log(` Episode ID: ${episodeId}`); + logger.debug(`\n [PODCAST STREAM] Request:`); + logger.debug(` Podcast ID: ${podcastId}`); + logger.debug(` Episode ID: ${episodeId}`); if (podcastDebug) { - console.log(` Range: ${req.headers.range || "none"}`); - console.log(` UA: ${req.headers["user-agent"] || "unknown"}`); + logger.debug(` Range: ${req.headers.range || "none"}`); + logger.debug(` UA: ${req.headers["user-agent"] || "unknown"}`); } const episode = await prisma.podcastEpisode.findUnique({ @@ -921,10 +922,10 @@ router.get("/:podcastId/episodes/:episodeId/stream", async (req, res) => { } if (podcastDebug) { - console.log(` Episode DB: title="${episode.title}"`); - console.log(` Episode DB: guid="${episode.guid}"`); - console.log(` Episode DB: audioUrl="${episode.audioUrl}"`); - console.log(` Episode DB: mimeType="${episode.mimeType || "unknown"}" fileSize=${episode.fileSize || 0}`); + logger.debug(` Episode DB: 
title="${episode.title}"`); + logger.debug(` Episode DB: guid="${episode.guid}"`); + logger.debug(` Episode DB: audioUrl="${episode.audioUrl}"`); + logger.debug(` Episode DB: mimeType="${episode.mimeType || "unknown"}" fileSize=${episode.fileSize || 0}`); } const range = req.headers.range; @@ -937,12 +938,12 @@ router.get("/:podcastId/episodes/:episodeId/stream", async (req, res) => { const cachedPath = await getCachedFilePath(episodeId); if (cachedPath) { - console.log(` Streaming from cache: ${cachedPath}`); + logger.debug(` Streaming from cache: ${cachedPath}`); try { const stats = await fs.promises.stat(cachedPath); const fileSize = stats.size; if (podcastDebug) { - console.log(` Cache file size: ${fileSize}`); + logger.debug(` Cache file size: ${fileSize}`); } if (fileSize === 0) { @@ -958,7 +959,7 @@ router.get("/:podcastId/episodes/:episodeId/stream", async (req, res) => { // Validate range bounds if (start >= fileSize) { - console.log( + logger.debug( ` Range start ${start} >= file size ${fileSize}, clamping to EOF` ); // Browsers can occasionally request a range start beyond EOF during media seeking. 
@@ -987,7 +988,7 @@ router.get("/:podcastId/episodes/:episodeId/stream", async (req, res) => { }); fileStream.pipe(res); fileStream.on("error", (err) => { - console.error(" Cache stream error:", err); + logger.error(" Cache stream error:", err); if (!res.headersSent) { res.status(500).json({ error: "Failed to stream episode", @@ -1002,7 +1003,7 @@ router.get("/:podcastId/episodes/:episodeId/stream", async (req, res) => { const validEnd = Math.min(end, fileSize - 1); const chunkSize = validEnd - start + 1; - console.log( + logger.debug( ` Serving range: bytes ${start}-${validEnd}/${fileSize}` ); @@ -1029,7 +1030,7 @@ router.get("/:podcastId/episodes/:episodeId/stream", async (req, res) => { }); fileStream.pipe(res); fileStream.on("error", (err) => { - console.error(" Cache stream error:", err); + logger.error(" Cache stream error:", err); if (!res.headersSent) { res.status(500).json({ error: "Failed to stream episode", @@ -1042,7 +1043,7 @@ router.get("/:podcastId/episodes/:episodeId/stream", async (req, res) => { } // No range - serve entire file - console.log(` Serving full file: ${fileSize} bytes`); + logger.debug(` Serving full file: ${fileSize} bytes`); res.writeHead(200, { "Content-Type": episode.mimeType || "audio/mpeg", "Content-Length": fileSize, @@ -1061,7 +1062,7 @@ router.get("/:podcastId/episodes/:episodeId/stream", async (req, res) => { }); fileStream.pipe(res); fileStream.on("error", (err) => { - console.error(" Cache stream error:", err); + logger.error(" Cache stream error:", err); if (!res.headersSent) { res.status(500).json({ error: "Failed to stream episode", @@ -1072,7 +1073,7 @@ router.get("/:podcastId/episodes/:episodeId/stream", async (req, res) => { }); return; // CRITICAL: Exit after starting cache stream } catch (err: any) { - console.error( + logger.error( " Failed to stream from cache, falling back to RSS:", err.message ); @@ -1082,12 +1083,12 @@ router.get("/:podcastId/episodes/:episodeId/stream", async (req, res) => { // Not cached yet 
- trigger background download while streaming from RSS if (userId && !isDownloading(episodeId)) { - console.log(` Triggering background download for caching`); + logger.debug(` Triggering background download for caching`); downloadInBackground(episodeId, episode.audioUrl, userId); } // Stream from RSS URL - console.log(` Streaming from RSS: ${episode.audioUrl}`); + logger.debug(` Streaming from RSS: ${episode.audioUrl}`); // Get file size first for proper range handling let fileSize = episode.fileSize; @@ -1104,7 +1105,7 @@ router.get("/:podcastId/episodes/:episodeId/stream", async (req, res) => { }); } } catch (err) { - console.warn(" Could not get file size via HEAD request"); + logger.warn(" Could not get file size via HEAD request"); } } @@ -1115,7 +1116,7 @@ router.get("/:podcastId/episodes/:episodeId/stream", async (req, res) => { const end = parts[1] ? parseInt(parts[1], 10) : fileSize - 1; const chunkSize = end - start + 1; - console.log(` Range request: bytes=${start}-${end}/${fileSize}`); + logger.debug(` Range request: bytes=${start}-${end}/${fileSize}`); try { // Try range request first @@ -1149,7 +1150,7 @@ router.get("/:podcastId/episodes/:episodeId/stream", async (req, res) => { } catch (rangeError: any) { // 416 = Range Not Satisfiable - many podcast CDNs don't support range requests // Fall back to streaming the full file and let the browser handle seeking - console.log( + logger.debug( ` Range request failed (${ rangeError.response?.status || rangeError.message }), falling back to full stream` @@ -1183,7 +1184,7 @@ router.get("/:podcastId/episodes/:episodeId/stream", async (req, res) => { } } else { // No range request - stream entire file - console.log(` Streaming full file`); + logger.debug(` Streaming full file`); const response = await axios.get(episode.audioUrl, { responseType: "stream", @@ -1209,7 +1210,7 @@ router.get("/:podcastId/episodes/:episodeId/stream", async (req, res) => { response.data.pipe(res); } } catch (error: any) { - 
console.error("\n [PODCAST STREAM] Error:", error.message); + logger.error("\n [PODCAST STREAM] Error:", error.message); if (!res.headersSent) { res.status(500).json({ error: "Failed to stream episode", @@ -1228,12 +1229,12 @@ router.post("/:podcastId/episodes/:episodeId/progress", async (req, res) => { const { podcastId, episodeId } = req.params; const { currentTime, duration, isFinished } = req.body; - console.log(`\n [PODCAST PROGRESS] Update:`); - console.log(` User: ${req.user!.username}`); - console.log(` Episode ID: ${episodeId}`); - console.log(` Current Time: ${currentTime}s`); - console.log(` Duration: ${duration}s`); - console.log(` Finished: ${isFinished}`); + logger.debug(`\n [PODCAST PROGRESS] Update:`); + logger.debug(` User: ${req.user!.username}`); + logger.debug(` Episode ID: ${episodeId}`); + logger.debug(` Current Time: ${currentTime}s`); + logger.debug(` Duration: ${duration}s`); + logger.debug(` Finished: ${isFinished}`); const progress = await prisma.podcastProgress.upsert({ where: { @@ -1257,7 +1258,7 @@ router.post("/:podcastId/episodes/:episodeId/progress", async (req, res) => { }, }); - console.log(` Progress saved`); + logger.debug(` Progress saved`); res.json({ success: true, @@ -1271,7 +1272,7 @@ router.post("/:podcastId/episodes/:episodeId/progress", async (req, res) => { }, }); } catch (error: any) { - console.error("Error updating progress:", error); + logger.error("Error updating progress:", error); res.status(500).json({ error: "Failed to update progress", message: error.message, @@ -1287,9 +1288,9 @@ router.delete("/:podcastId/episodes/:episodeId/progress", async (req, res) => { try { const { episodeId } = req.params; - console.log(`\n[PODCAST PROGRESS] Delete:`); - console.log(` User: ${req.user!.username}`); - console.log(` Episode ID: ${episodeId}`); + logger.debug(`\n[PODCAST PROGRESS] Delete:`); + logger.debug(` User: ${req.user!.username}`); + logger.debug(` Episode ID: ${episodeId}`); await 
prisma.podcastProgress.deleteMany({ where: { @@ -1298,14 +1299,14 @@ router.delete("/:podcastId/episodes/:episodeId/progress", async (req, res) => { }, }); - console.log(` Progress removed`); + logger.debug(` Progress removed`); res.json({ success: true, message: "Progress removed", }); } catch (error: any) { - console.error("Error removing progress:", error); + logger.error("Error removing progress:", error); res.status(500).json({ error: "Failed to remove progress", message: error.message, @@ -1329,7 +1330,7 @@ router.get("/:id/similar", async (req, res) => { return res.status(404).json({ error: "Podcast not found" }); } - console.log(`\n [SIMILAR PODCASTS] Request for: ${podcast.title}`); + logger.debug(`\n [SIMILAR PODCASTS] Request for: ${podcast.title}`); try { // Check cache first @@ -1344,7 +1345,7 @@ router.get("/:id/similar", async (req, res) => { }); if (cachedRecommendations.length > 0) { - console.log( + logger.debug( ` Using ${cachedRecommendations.length} cached recommendations` ); return res.json( @@ -1364,15 +1365,15 @@ router.get("/:id/similar", async (req, res) => { } // Fetch from iTunes Search API - console.log(` Fetching from iTunes Search API...`); + logger.debug(` Fetching from iTunes Search API...`); const { itunesService } = await import("../services/itunes"); const recommendations = await itunesService.getSimilarPodcasts( podcast.title, - podcast.description || undefined, - podcast.author + podcast.description ?? undefined, + podcast.author ?? 
undefined ); - console.log(` Found ${recommendations.length} similar podcasts`); + logger.debug(` Found ${recommendations.length} similar podcasts`); if (recommendations.length > 0) { // Cache recommendations @@ -1400,7 +1401,7 @@ router.get("/:id/similar", async (req, res) => { })), }); - console.log( + logger.debug( ` Cached ${recommendations.length} recommendations` ); @@ -1420,14 +1421,14 @@ router.get("/:id/similar", async (req, res) => { ); } } catch (error: any) { - console.warn(" iTunes search failed:", error.message); + logger.warn(" iTunes search failed:", error.message); } // No recommendations available - console.log(` No recommendations found`); + logger.debug(` No recommendations found`); res.json([]); } catch (error: any) { - console.error("Error fetching similar podcasts:", error); + logger.error("Error fetching similar podcasts:", error); res.status(500).json({ error: "Failed to fetch similar podcasts", message: error.message, @@ -1488,7 +1489,7 @@ router.get("/:id/cover", async (req, res) => { res.status(404).json({ error: "Cover not found" }); } catch (error: any) { - console.error("Error serving podcast cover:", error); + logger.error("Error serving podcast cover:", error); res.status(500).json({ error: "Failed to serve cover", message: error.message, @@ -1549,7 +1550,7 @@ router.get("/episodes/:episodeId/cover", async (req, res) => { res.status(404).json({ error: "Cover not found" }); } catch (error: any) { - console.error("Error serving episode cover:", error); + logger.error("Error serving episode cover:", error); res.status(500).json({ error: "Failed to serve cover", message: error.message, diff --git a/backend/src/routes/recommendations.ts b/backend/src/routes/recommendations.ts index 478ec54..8c67c47 100644 --- a/backend/src/routes/recommendations.ts +++ b/backend/src/routes/recommendations.ts @@ -1,4 +1,5 @@ import { Router } from "express"; +import { logger } from "../utils/logger"; import { requireAuth, requireAuthOrToken } from 
"../middleware/auth"; import { prisma } from "../utils/db"; import { lastFmService } from "../services/lastfm"; @@ -93,7 +94,7 @@ router.get("/for-you", async (req, res) => { }); const ownedArtistIds = new Set(ownedArtists.map((a) => a.artistId)); - console.log( + logger.debug( `Filtering recommendations: ${ownedArtistIds.size} owned artists to exclude` ); @@ -158,11 +159,11 @@ router.get("/for-you", async (req, res) => { }; }); - console.log( + logger.debug( `Recommendations: Found ${artistsWithMetadata.length} new artists` ); artistsWithMetadata.forEach((a) => { - console.log( + logger.debug( ` ${a.name}: coverArt=${a.coverArt ? "YES" : "NO"}, albums=${ a.albumCount }` @@ -171,7 +172,7 @@ router.get("/for-you", async (req, res) => { res.json({ artists: artistsWithMetadata }); } catch (error) { - console.error("Get recommendations for you error:", error); + logger.error("Get recommendations for you error:", error); res.status(500).json({ error: "Failed to get recommendations" }); } }); @@ -244,7 +245,7 @@ router.get("/", async (req, res) => { recommendations, }); } catch (error) { - console.error("Get recommendations error:", error); + logger.error("Get recommendations error:", error); res.status(500).json({ error: "Failed to get recommendations" }); } }); @@ -363,7 +364,7 @@ router.get("/albums", async (req, res) => { recommendations, }); } catch (error) { - console.error("Get album recommendations error:", error); + logger.error("Get album recommendations error:", error); res.status(500).json({ error: "Failed to get album recommendations", }); @@ -459,7 +460,7 @@ router.get("/tracks", async (req, res) => { recommendations, }); } catch (error) { - console.error("Get track recommendations error:", error); + logger.error("Get track recommendations error:", error); res.status(500).json({ error: "Failed to get track recommendations", }); diff --git a/backend/src/routes/releases.ts b/backend/src/routes/releases.ts index 5dc6e8e..ea330d6 100644 --- 
a/backend/src/routes/releases.ts +++ b/backend/src/routes/releases.ts @@ -1,6 +1,8 @@ +import { logger } from "../utils/logger"; + /** * Release Radar API - * + * * Provides upcoming and recent releases from: * 1. Lidarr monitored artists (via calendar API) * 2. Similar artists from user's library (Last.fm similar artists) @@ -52,7 +54,7 @@ router.get("/radar", async (req, res) => { const endDate = new Date(now); endDate.setDate(endDate.getDate() + daysAhead); - console.log(`[Releases] Fetching radar: ${daysBack} days back, ${daysAhead} days ahead`); + logger.debug(`[Releases] Fetching radar: ${daysBack} days back, ${daysAhead} days ahead`); // 1. Get releases from Lidarr calendar (monitored artists) const lidarrReleases = await lidarrService.getCalendar(startDate, endDate); @@ -92,8 +94,8 @@ router.get("/radar", async (req, res) => { sa => sa.toArtist.mbid && !monitoredMbids.has(sa.toArtist.mbid) ); - console.log(`[Releases] Found ${lidarrReleases.length} Lidarr releases`); - console.log(`[Releases] Found ${unmonitoredSimilar.length} unmonitored similar artists`); + logger.debug(`[Releases] Found ${lidarrReleases.length} Lidarr releases`); + logger.debug(`[Releases] Found ${unmonitoredSimilar.length} unmonitored similar artists`); // 4. 
Get albums in library to check what user already has const libraryAlbums = await prisma.album.findMany({ @@ -142,7 +144,7 @@ router.get("/radar", async (req, res) => { res.json(response); } catch (error: any) { - console.error("[Releases] Radar error:", error.message); + logger.error("[Releases] Radar error:", error.message); res.status(500).json({ error: "Failed to fetch release radar" }); } }); @@ -173,7 +175,7 @@ router.get("/upcoming", async (req, res) => { daysAhead, }); } catch (error: any) { - console.error("[Releases] Upcoming error:", error.message); + logger.error("[Releases] Upcoming error:", error.message); res.status(500).json({ error: "Failed to fetch upcoming releases" }); } }); @@ -195,7 +197,6 @@ router.get("/recent", async (req, res) => { // Get library albums to mark what's already downloaded const libraryAlbums = await prisma.album.findMany({ - where: { rgMbid: { not: null } }, select: { rgMbid: true } }); const libraryMbids = new Set(libraryAlbums.map(a => a.rgMbid).filter(Boolean)); @@ -214,7 +215,7 @@ router.get("/recent", async (req, res) => { inLibraryCount: releases.length - notInLibrary.length, }); } catch (error: any) { - console.error("[Releases] Recent error:", error.message); + logger.error("[Releases] Recent error:", error.message); res.status(500).json({ error: "Failed to fetch recent releases" }); } }); @@ -233,24 +234,15 @@ router.post("/download/:albumMbid", async (req, res) => { return res.status(401).json({ error: "Authentication required" }); } - console.log(`[Releases] Download requested for album: ${albumMbid}`); + logger.debug(`[Releases] Download requested for album: ${albumMbid}`); - // Use Lidarr to download the album - const result = await lidarrService.downloadAlbum(albumMbid); - - if (result) { - res.json({ - success: true, - message: "Download started", - albumId: result.id - }); - } else { - res.status(404).json({ - error: "Album not found in Lidarr or download failed" - }); - } + // TODO: Implement downloadAlbum 
method on LidarrService + // For now, return not implemented error + res.status(501).json({ + error: "Download feature not yet implemented for release radar" + }); } catch (error: any) { - console.error("[Releases] Download error:", error.message); + logger.error("[Releases] Download error:", error.message); res.status(500).json({ error: "Failed to start download" }); } }); diff --git a/backend/src/routes/search.ts b/backend/src/routes/search.ts index 2d901da..b5bad71 100644 --- a/backend/src/routes/search.ts +++ b/backend/src/routes/search.ts @@ -1,4 +1,5 @@ import { Router } from "express"; +import { logger } from "../utils/logger"; import { requireAuth } from "../middleware/auth"; import { prisma } from "../utils/db"; import { audiobookshelfService } from "../services/audiobookshelf"; @@ -33,7 +34,7 @@ router.use(requireAuth); * name: type * schema: * type: string - * enum: [all, artists, albums, tracks, audiobooks, podcasts] + * enum: [all, artists, albums, tracks, audiobooks, podcasts, episodes] * description: Type of content to search * default: all * - in: query @@ -102,11 +103,13 @@ router.get("/", async (req, res) => { } // Check cache for library search (short TTL since library can change) - const cacheKey = `search:library:${type}:${genre || ""}:${query}:${searchLimit}`; + const cacheKey = `search:library:${type}:${ + genre || "" + }:${query}:${searchLimit}`; try { const cached = await redisClient.get(cacheKey); if (cached) { - console.log(`[SEARCH] Cache hit for query="${query}"`); + logger.debug(`[SEARCH] Cache hit for query="${query}"`); return res.json(JSON.parse(cached)); } } catch (err) { @@ -119,6 +122,7 @@ router.get("/", async (req, res) => { tracks: [], audiobooks: [], podcasts: [], + episodes: [], }; // Search artists using full-text search (only show artists with actual albums in library) @@ -246,41 +250,48 @@ router.get("/", async (req, res) => { } } - // Search audiobooks + // Search audiobooks using FTS if (type === "all" || type === 
"audiobooks") { try { - const audiobooks = await audiobookshelfService.searchAudiobooks( - query - ); - results.audiobooks = audiobooks.slice(0, searchLimit); + const audiobooks = await searchService.searchAudiobooksFTS({ + query, + limit: searchLimit, + }); + results.audiobooks = audiobooks; } catch (error) { - console.error("Audiobook search error:", error); + logger.error("Audiobook search error:", error); results.audiobooks = []; } } - // Search podcasts (search through owned podcasts) + // Search podcasts using FTS if (type === "all" || type === "podcasts") { try { - const allPodcasts = - await audiobookshelfService.getAllPodcasts(); - results.podcasts = allPodcasts - .filter( - (p) => - p.media?.metadata?.title - ?.toLowerCase() - .includes(query.toLowerCase()) || - p.media?.metadata?.author - ?.toLowerCase() - .includes(query.toLowerCase()) - ) - .slice(0, searchLimit); + const podcasts = await searchService.searchPodcastsFTS({ + query, + limit: searchLimit, + }); + results.podcasts = podcasts; } catch (error) { - console.error("Podcast search error:", error); + logger.error("Podcast search error:", error); results.podcasts = []; } } + // Search podcast episodes + if (type === "all" || type === "episodes") { + try { + const episodes = await searchService.searchEpisodes({ + query, + limit: searchLimit, + }); + results.episodes = episodes; + } catch (error) { + logger.error("Episode search error:", error); + results.episodes = []; + } + } + // Cache search results for 2 minutes (library can change) try { await redisClient.setEx(cacheKey, 120, JSON.stringify(results)); @@ -290,7 +301,7 @@ router.get("/", async (req, res) => { res.json(results); } catch (error) { - console.error("Search error:", error); + logger.error("Search error:", error); res.status(500).json({ error: "Search failed" }); } }); @@ -315,7 +326,7 @@ router.get("/genres", async (req, res) => { })) ); } catch (error) { - console.error("Get genres error:", error); + logger.error("Get genres 
error:", error); res.status(500).json({ error: "Failed to get genres" }); } }); @@ -339,13 +350,13 @@ router.get("/discover", async (req, res) => { try { const cached = await redisClient.get(cacheKey); if (cached) { - console.log( + logger.debug( `[SEARCH DISCOVER] Cache hit for query="${query}" type=${type}` ); return res.json(JSON.parse(cached)); } } catch (err) { - console.warn("[SEARCH DISCOVER] Redis read error:", err); + logger.warn("[SEARCH DISCOVER] Redis read error:", err); } const results: any[] = []; @@ -353,27 +364,56 @@ router.get("/discover", async (req, res) => { if (type === "music" || type === "all") { // Search Last.fm for artists AND tracks try { - // Search for artists + // Check if query is a potential alias + let searchQuery = query; + let aliasInfo: any = null; + + try { + const correction = await lastFmService.getArtistCorrection(query); + if (correction?.corrected) { + // Query is an alias - search for canonical name instead + searchQuery = correction.canonicalName; + aliasInfo = { + type: "alias_resolution", + original: query, + canonical: correction.canonicalName, + mbid: correction.mbid, + }; + logger.debug( + `[SEARCH DISCOVER] Alias resolved: "${query}" → "${correction.canonicalName}"` + ); + } + } catch (correctionError) { + logger.warn("[SEARCH DISCOVER] Correction check failed:", correctionError); + } + + // Search for artists (using potentially corrected query) const lastfmArtistResults = await lastFmService.searchArtists( - query, + searchQuery, searchLimit ); - console.log( + logger.debug( `[SEARCH ENDPOINT] Found ${lastfmArtistResults.length} artist results` ); + + // Add alias info to response if applicable + if (aliasInfo) { + results.push(aliasInfo); + } + results.push(...lastfmArtistResults); - // Search for tracks (songs) + // Search for tracks (songs) - use corrected query for consistency const lastfmTrackResults = await lastFmService.searchTracks( - query, + searchQuery, searchLimit ); - console.log( + logger.debug( 
`[SEARCH ENDPOINT] Found ${lastfmTrackResults.length} track results` ); results.push(...lastfmTrackResults); } catch (error) { - console.error("Last.fm search error:", error); + logger.error("Last.fm search error:", error); } } @@ -410,7 +450,7 @@ router.get("/discover", async (req, res) => { results.push(...podcasts); } catch (error) { - console.error("iTunes podcast search error:", error); + logger.error("iTunes podcast search error:", error); } } @@ -419,12 +459,12 @@ router.get("/discover", async (req, res) => { try { await redisClient.setEx(cacheKey, 900, JSON.stringify(payload)); } catch (err) { - console.warn("[SEARCH DISCOVER] Redis write error:", err); + logger.warn("[SEARCH DISCOVER] Redis write error:", err); } res.json(payload); } catch (error) { - console.error("Discovery search error:", error); + logger.error("Discovery search error:", error); res.status(500).json({ error: "Discovery search failed" }); } }); diff --git a/backend/src/routes/settings.ts b/backend/src/routes/settings.ts index e980994..0ceb73f 100644 --- a/backend/src/routes/settings.ts +++ b/backend/src/routes/settings.ts @@ -1,7 +1,9 @@ import { Router } from "express"; +import { logger } from "../utils/logger"; import { requireAuth } from "../middleware/auth"; import { prisma } from "../utils/db"; import { z } from "zod"; +import { staleJobCleanupService } from "../services/staleJobCleanup"; const router = Router(); @@ -38,7 +40,7 @@ router.get("/", async (req, res) => { res.json(settings); } catch (error) { - console.error("Get settings error:", error); + logger.error("Get settings error:", error); res.status(500).json({ error: "Failed to get settings" }); } }); @@ -65,9 +67,30 @@ router.post("/", async (req, res) => { .status(400) .json({ error: "Invalid settings", details: error.errors }); } - console.error("Update settings error:", error); + logger.error("Update settings error:", error); res.status(500).json({ error: "Failed to update settings" }); } }); +// POST 
/settings/cleanup-stale-jobs +router.post("/cleanup-stale-jobs", async (req, res) => { + try { + const result = await staleJobCleanupService.cleanupAll(); + + res.json({ + success: true, + cleaned: { + discoveryBatches: result.discoveryBatches, + downloadJobs: result.downloadJobs, + spotifyImportJobs: result.spotifyImportJobs, + bullQueues: result.bullQueues, + }, + totalCleaned: result.totalCleaned, + }); + } catch (error) { + logger.error("Stale job cleanup error:", error); + res.status(500).json({ error: "Failed to cleanup stale jobs" }); + } +}); + export default router; diff --git a/backend/src/routes/soulseek.ts b/backend/src/routes/soulseek.ts index 4be669e..d2d7c49 100644 --- a/backend/src/routes/soulseek.ts +++ b/backend/src/routes/soulseek.ts @@ -1,3 +1,5 @@ +import { logger } from "../utils/logger"; + /** * Soulseek routes - Direct connection via slsk-client * Simplified API for status and manual search/download @@ -23,7 +25,7 @@ async function requireSoulseekConfigured(req: any, res: any, next: any) { next(); } catch (error) { - console.error("Error checking Soulseek settings:", error); + logger.error("Error checking Soulseek settings:", error); res.status(500).json({ error: "Failed to check settings" }); } } @@ -52,7 +54,7 @@ router.get("/status", requireAuth, async (req, res) => { username: status.username, }); } catch (error: any) { - console.error("Soulseek status error:", error.message); + logger.error("Soulseek status error:", error.message); res.status(500).json({ error: "Failed to get Soulseek status", details: error.message, @@ -73,7 +75,7 @@ router.post("/connect", requireAuth, requireSoulseekConfigured, async (req, res) message: "Connected to Soulseek network", }); } catch (error: any) { - console.error("Soulseek connect error:", error.message); + logger.error("Soulseek connect error:", error.message); res.status(500).json({ error: "Failed to connect to Soulseek", details: error.message, @@ -95,7 +97,7 @@ router.post("/search", requireAuth, 
requireSoulseekConfigured, async (req, res) }); } - console.log(`[Soulseek] Searching: "${artist} - ${title}"`); + logger.debug(`[Soulseek] Searching: "${artist} - ${title}"`); const result = await soulseekService.searchTrack(artist, title); @@ -117,7 +119,7 @@ router.post("/search", requireAuth, requireSoulseekConfigured, async (req, res) }); } } catch (error: any) { - console.error("Soulseek search error:", error.message); + logger.error("Soulseek search error:", error.message); res.status(500).json({ error: "Search failed", details: error.message, @@ -148,7 +150,7 @@ router.post("/download", requireAuth, requireSoulseekConfigured, async (req, res }); } - console.log(`[Soulseek] Downloading: "${artist} - ${title}"`); + logger.debug(`[Soulseek] Downloading: "${artist} - ${title}"`); const result = await soulseekService.searchAndDownload( artist, @@ -169,7 +171,7 @@ router.post("/download", requireAuth, requireSoulseekConfigured, async (req, res }); } } catch (error: any) { - console.error("Soulseek download error:", error.message); + logger.error("Soulseek download error:", error.message); res.status(500).json({ error: "Download failed", details: error.message, diff --git a/backend/src/routes/spotify.ts b/backend/src/routes/spotify.ts index c0910d4..62aaba2 100644 --- a/backend/src/routes/spotify.ts +++ b/backend/src/routes/spotify.ts @@ -1,4 +1,5 @@ import { Router } from "express"; +import { logger } from "../utils/logger"; import { requireAuthOrToken } from "../middleware/auth"; import { z } from "zod"; import { spotifyService } from "../services/spotify"; @@ -51,7 +52,7 @@ router.post("/parse", async (req, res) => { url: `https://open.spotify.com/playlist/${parsed.id}`, }); } catch (error: any) { - console.error("Spotify parse error:", error); + logger.error("Spotify parse error:", error); if (error.name === "ZodError") { return res.status(400).json({ error: "Invalid request body" }); } @@ -67,7 +68,7 @@ router.post("/preview", async (req, res) => { try { 
const { url } = parseUrlSchema.parse(req.body); - console.log(`[Playlist Import] Generating preview for: ${url}`); + logger.debug(`[Playlist Import] Generating preview for: ${url}`); // Detect if it's a Deezer URL if (url.includes("deezer.com")) { @@ -94,7 +95,7 @@ router.post("/preview", async (req, res) => { deezerPlaylist ); - console.log( + logger.debug( `[Playlist Import] Deezer preview generated: ${preview.summary.total} tracks, ${preview.summary.inLibrary} in library` ); res.json(preview); @@ -102,13 +103,13 @@ router.post("/preview", async (req, res) => { // Handle Spotify URL const preview = await spotifyImportService.generatePreview(url); - console.log( + logger.debug( `[Spotify Import] Preview generated: ${preview.summary.total} tracks, ${preview.summary.inLibrary} in library` ); res.json(preview); } } catch (error: any) { - console.error("Playlist preview error:", error); + logger.error("Playlist preview error:", error); if (error.name === "ZodError") { return res.status(400).json({ error: "Invalid request body" }); } @@ -124,6 +125,9 @@ router.post("/preview", async (req, res) => { */ router.post("/import", async (req, res) => { try { + if (!req.user) { + return res.status(401).json({ error: "Unauthorized" }); + } const { spotifyPlaylistId, url, playlistName, albumMbidsToDownload } = importSchema.parse(req.body); const userId = req.user.id; @@ -155,10 +159,10 @@ router.post("/import", async (req, res) => { preview = await spotifyImportService.generatePreview(effectiveUrl); } - console.log( + logger.debug( `[Spotify Import] Starting import for user ${userId}: ${playlistName}` ); - console.log( + logger.debug( `[Spotify Import] Downloading ${albumMbidsToDownload.length} albums` ); @@ -176,7 +180,7 @@ router.post("/import", async (req, res) => { message: "Import started", }); } catch (error: any) { - console.error("Spotify import error:", error); + logger.error("Spotify import error:", error); if (error.name === "ZodError") { return res.status(400).json({ 
error: "Invalid request body" }); } @@ -192,6 +196,9 @@ router.post("/import", async (req, res) => { */ router.get("/import/:jobId/status", async (req, res) => { try { + if (!req.user) { + return res.status(401).json({ error: "Unauthorized" }); + } const { jobId } = req.params; const userId = req.user.id; @@ -209,7 +216,7 @@ router.get("/import/:jobId/status", async (req, res) => { res.json(job); } catch (error: any) { - console.error("Spotify job status error:", error); + logger.error("Spotify job status error:", error); res.status(500).json({ error: error.message || "Failed to get job status", }); @@ -222,11 +229,14 @@ router.get("/import/:jobId/status", async (req, res) => { */ router.get("/imports", async (req, res) => { try { + if (!req.user) { + return res.status(401).json({ error: "Unauthorized" }); + } const userId = req.user.id; const jobs = await spotifyImportService.getUserJobs(userId); res.json(jobs); } catch (error: any) { - console.error("Spotify imports error:", error); + logger.error("Spotify imports error:", error); res.status(500).json({ error: error.message || "Failed to get imports", }); @@ -240,6 +250,7 @@ router.get("/imports", async (req, res) => { router.post("/import/:jobId/refresh", async (req, res) => { try { const { jobId } = req.params; + if (!req.user) return res.status(401).json({ error: "Unauthorized" }); const userId = req.user.id; const job = await spotifyImportService.getJob(jobId); @@ -265,7 +276,7 @@ router.post("/import/:jobId/refresh", async (req, res) => { total: result.total, }); } catch (error: any) { - console.error("Spotify refresh error:", error); + logger.error("Spotify refresh error:", error); res.status(500).json({ error: error.message || "Failed to refresh tracks", }); @@ -279,7 +290,7 @@ router.post("/import/:jobId/refresh", async (req, res) => { router.post("/import/:jobId/cancel", async (req, res) => { try { const { jobId } = req.params; - const userId = req.user.id; + const userId = req.user!.id; const job = 
await spotifyImportService.getJob(jobId); if (!job) { @@ -303,7 +314,7 @@ router.post("/import/:jobId/cancel", async (req, res) => { tracksMatched: result.tracksMatched, }); } catch (error: any) { - console.error("Spotify cancel error:", error); + logger.error("Spotify cancel error:", error); res.status(500).json({ error: error.message || "Failed to cancel import", }); @@ -324,7 +335,7 @@ router.get("/import/session-log", async (req, res) => { content: log, }); } catch (error: any) { - console.error("Session log error:", error); + logger.error("Session log error:", error); res.status(500).json({ error: error.message || "Failed to read session log", }); diff --git a/backend/src/routes/systemSettings.ts b/backend/src/routes/systemSettings.ts index 9f6b8b3..ba00477 100644 --- a/backend/src/routes/systemSettings.ts +++ b/backend/src/routes/systemSettings.ts @@ -1,4 +1,5 @@ import { Router } from "express"; +import { logger } from "../utils/logger"; import { requireAuth, requireAdmin } from "../middleware/auth"; import { prisma } from "../utils/db"; import { z } from "zod"; @@ -17,7 +18,7 @@ function safeDecrypt(value: string | null): string | null { try { return decrypt(value); } catch (error) { - console.warn("[Settings Route] Failed to decrypt field, returning null"); + logger.warn("[Settings Route] Failed to decrypt field, returning null"); return null; } } @@ -31,6 +32,7 @@ const systemSettingsSchema = z.object({ lidarrEnabled: z.boolean().optional(), lidarrUrl: z.string().optional(), lidarrApiKey: z.string().nullable().optional(), + lidarrWebhookSecret: z.string().nullable().optional(), // AI Services openaiEnabled: z.boolean().optional(), @@ -41,6 +43,8 @@ const systemSettingsSchema = z.object({ fanartEnabled: z.boolean().optional(), fanartApiKey: z.string().nullable().optional(), + lastfmApiKey: z.string().nullable().optional(), + // Media Services audiobookshelfEnabled: z.boolean().optional(), audiobookshelfUrl: z.string().optional(), @@ -66,10 +70,11 @@ const 
systemSettingsSchema = z.object({ maxConcurrentDownloads: z.number().optional(), downloadRetryAttempts: z.number().optional(), transcodeCacheMaxGb: z.number().optional(), + soulseekConcurrentDownloads: z.number().min(1).max(10).optional(), // Download Preferences downloadSource: z.enum(["soulseek", "lidarr"]).optional(), - soulseekFallback: z.enum(["none", "lidarr"]).optional(), + primaryFailureFallback: z.enum(["none", "lidarr", "soulseek"]).optional(), }); // GET /system-settings @@ -107,8 +112,10 @@ router.get("/", async (req, res) => { const decryptedSettings = { ...settings, lidarrApiKey: safeDecrypt(settings.lidarrApiKey), + lidarrWebhookSecret: safeDecrypt(settings.lidarrWebhookSecret), openaiApiKey: safeDecrypt(settings.openaiApiKey), fanartApiKey: safeDecrypt(settings.fanartApiKey), + lastfmApiKey: safeDecrypt(settings.lastfmApiKey), audiobookshelfApiKey: safeDecrypt(settings.audiobookshelfApiKey), soulseekPassword: safeDecrypt(settings.soulseekPassword), spotifyClientSecret: safeDecrypt(settings.spotifyClientSecret), @@ -116,7 +123,7 @@ router.get("/", async (req, res) => { res.json(decryptedSettings); } catch (error) { - console.error("Get system settings error:", error); + logger.error("Get system settings error:", error); res.status(500).json({ error: "Failed to get system settings" }); } }); @@ -126,8 +133,8 @@ router.post("/", async (req, res) => { try { const data = systemSettingsSchema.parse(req.body); - console.log("[SYSTEM SETTINGS] Saving settings..."); - console.log( + logger.debug("[SYSTEM SETTINGS] Saving settings..."); + logger.debug( "[SYSTEM SETTINGS] transcodeCacheMaxGb:", data.transcodeCacheMaxGb ); @@ -137,10 +144,14 @@ router.post("/", async (req, res) => { if (data.lidarrApiKey) encryptedData.lidarrApiKey = encrypt(data.lidarrApiKey); + if (data.lidarrWebhookSecret) + encryptedData.lidarrWebhookSecret = encrypt(data.lidarrWebhookSecret); if (data.openaiApiKey) encryptedData.openaiApiKey = encrypt(data.openaiApiKey); if 
(data.fanartApiKey) encryptedData.fanartApiKey = encrypt(data.fanartApiKey); + if (data.lastfmApiKey) + encryptedData.lastfmApiKey = encrypt(data.lastfmApiKey); if (data.audiobookshelfApiKey) encryptedData.audiobookshelfApiKey = encrypt( data.audiobookshelfApiKey @@ -161,19 +172,27 @@ router.post("/", async (req, res) => { invalidateSystemSettingsCache(); + // Refresh Last.fm API key if it was updated + try { + const { lastFmService } = await import("../services/lastfm"); + await lastFmService.refreshApiKey(); + } catch (err) { + logger.warn("Failed to refresh Last.fm API key:", err); + } + // If Audiobookshelf was disabled, clear all audiobook-related data if (data.audiobookshelfEnabled === false) { - console.log( + logger.debug( "[CLEANUP] Audiobookshelf disabled - clearing all audiobook data from database" ); try { const deletedProgress = await prisma.audiobookProgress.deleteMany({}); - console.log( + logger.debug( ` Deleted ${deletedProgress.count} audiobook progress entries` ); } catch (clearError) { - console.error("Failed to clear audiobook data:", clearError); + logger.error("Failed to clear audiobook data:", clearError); // Don't fail the request } } @@ -191,28 +210,28 @@ router.post("/", async (req, res) => { SOULSEEK_USERNAME: data.soulseekUsername || null, SOULSEEK_PASSWORD: data.soulseekPassword || null, }); - console.log(".env file synchronized with database settings"); + logger.debug(".env file synchronized with database settings"); } catch (envError) { - console.error("Failed to write .env file:", envError); + logger.error("Failed to write .env file:", envError); // Don't fail the request if .env write fails } // Auto-configure Lidarr webhook if Lidarr is enabled if (data.lidarrEnabled && data.lidarrUrl && data.lidarrApiKey) { try { - console.log("[LIDARR] Auto-configuring webhook..."); + logger.debug("[LIDARR] Auto-configuring webhook..."); const axios = (await import("axios")).default; const lidarrUrl = data.lidarrUrl; const apiKey = 
data.lidarrApiKey; // Determine webhook URL - // Use LIDIFY_CALLBACK_URL env var if set, otherwise default to host.docker.internal:3030 - // Port 3030 is the external Nginx port that Lidarr can reach - const callbackHost = process.env.LIDIFY_CALLBACK_URL || "http://host.docker.internal:3030"; + // Use LIDIFY_CALLBACK_URL env var if set, otherwise default to backend:3006 + // In Docker, services communicate via Docker network names (backend, lidarr, etc.) + const callbackHost = process.env.LIDIFY_CALLBACK_URL || "http://backend:3006"; const webhookUrl = `${callbackHost}/api/webhooks/lidarr`; - console.log(` Webhook URL: ${webhookUrl}`); + logger.debug(` Webhook URL: ${webhookUrl}`); // Check if webhook already exists - find by name "Lidify" OR by URL containing "lidify" or "webhooks/lidarr" const notificationsResponse = await axios.get( @@ -241,10 +260,10 @@ router.post("/", async (req, res) => { if (existingWebhook) { const currentUrl = existingWebhook.fields?.find((f: any) => f.name === "url")?.value; - console.log(` Found existing webhook: "${existingWebhook.name}" with URL: ${currentUrl}`); + logger.debug(` Found existing webhook: "${existingWebhook.name}" with URL: ${currentUrl}`); if (currentUrl !== webhookUrl) { - console.log(` URL needs updating from: ${currentUrl}`); - console.log(` URL will be updated to: ${webhookUrl}`); + logger.debug(` URL needs updating from: ${currentUrl}`); + logger.debug(` URL will be updated to: ${webhookUrl}`); } } @@ -293,7 +312,7 @@ router.post("/", async (req, res) => { timeout: 10000, } ); - console.log(" Webhook updated"); + logger.debug(" Webhook updated"); } else { // Create new webhook (use forceSave to skip test) await axios.post( @@ -304,22 +323,22 @@ router.post("/", async (req, res) => { timeout: 10000, } ); - console.log(" Webhook created"); + logger.debug(" Webhook created"); } - console.log("Lidarr webhook configured automatically\n"); + logger.debug("Lidarr webhook configured automatically\n"); } catch 
(webhookError: any) { - console.error( + logger.error( "Failed to auto-configure webhook:", webhookError.message ); if (webhookError.response?.data) { - console.error( + logger.error( " Lidarr error details:", JSON.stringify(webhookError.response.data, null, 2) ); } - console.log( + logger.debug( " User can configure webhook manually in Lidarr UI\n" ); // Don't fail the request if webhook config fails @@ -338,7 +357,7 @@ router.post("/", async (req, res) => { .status(400) .json({ error: "Invalid settings", details: error.errors }); } - console.error("Update system settings error:", error); + logger.error("Update system settings error:", error); res.status(500).json({ error: "Failed to update system settings" }); } }); @@ -348,7 +367,7 @@ router.post("/test-lidarr", async (req, res) => { try { const { url, apiKey } = req.body; - console.log("[Lidarr Test] Testing connection to:", url); + logger.debug("[Lidarr Test] Testing connection to:", url); if (!url || !apiKey) { return res @@ -368,7 +387,7 @@ router.post("/test-lidarr", async (req, res) => { } ); - console.log( + logger.debug( "[Lidarr Test] Connection successful, version:", response.data.version ); @@ -379,8 +398,8 @@ router.post("/test-lidarr", async (req, res) => { version: response.data.version, }); } catch (error: any) { - console.error("[Lidarr Test] Error:", error.message); - console.error( + logger.error("[Lidarr Test] Error:", error.message); + logger.error( "[Lidarr Test] Details:", error.response?.data || error.code ); @@ -433,7 +452,7 @@ router.post("/test-openai", async (req, res) => { model: response.data.model, }); } catch (error: any) { - console.error("OpenAI test error:", error.message); + logger.error("OpenAI test error:", error.message); res.status(500).json({ error: "Failed to connect to OpenAI", details: error.response?.data?.error?.message || error.message, @@ -469,7 +488,7 @@ router.post("/test-fanart", async (req, res) => { message: "Fanart.tv connection successful", }); } catch 
(error: any) { - console.error("Fanart.tv test error:", error.message); + logger.error("Fanart.tv test error:", error.message); if (error.response?.status === 401) { res.status(401).json({ error: "Invalid Fanart.tv API key", @@ -483,6 +502,59 @@ router.post("/test-fanart", async (req, res) => { } }); +// Test Last.fm connection +router.post("/test-lastfm", async (req, res) => { + try { + const { lastfmApiKey } = req.body; + + if (!lastfmApiKey) { + return res.status(400).json({ error: "API key is required" }); + } + + const axios = require("axios"); + + // Test with a known artist (The Beatles) + const testArtist = "The Beatles"; + + const response = await axios.get( + "http://ws.audioscrobbler.com/2.0/", + { + params: { + method: "artist.getinfo", + artist: testArtist, + api_key: lastfmApiKey, + format: "json", + }, + timeout: 5000, + } + ); + + // If we get here and have artist data, the API key is valid + if (response.data.artist) { + res.json({ + success: true, + message: "Last.fm connection successful", + }); + } else { + res.status(500).json({ + error: "Unexpected response from Last.fm", + }); + } + } catch (error: any) { + logger.error("Last.fm test error:", error.message); + if (error.response?.status === 403 || error.response?.data?.error === 10) { + res.status(401).json({ + error: "Invalid Last.fm API key", + }); + } else { + res.status(500).json({ + error: "Failed to connect to Last.fm", + details: error.response?.data || error.message, + }); + } + } +}); + // Test Audiobookshelf connection router.post("/test-audiobookshelf", async (req, res) => { try { @@ -509,7 +581,7 @@ router.post("/test-audiobookshelf", async (req, res) => { libraries: response.data.libraries?.length || 0, }); } catch (error: any) { - console.error("Audiobookshelf test error:", error.message); + logger.error("Audiobookshelf test error:", error.message); if (error.response?.status === 401 || error.response?.status === 403) { res.status(401).json({ error: "Invalid Audiobookshelf API 
key", @@ -534,7 +606,7 @@ router.post("/test-soulseek", async (req, res) => { }); } - console.log(`[SOULSEEK-TEST] Testing connection as "${username}"...`); + logger.debug(`[SOULSEEK-TEST] Testing connection as "${username}"...`); // Import soulseek service const { soulseekService } = await import("../services/soulseek"); @@ -550,10 +622,10 @@ router.post("/test-soulseek", async (req, res) => { { user: username, pass: password }, (err: Error | null, client: any) => { if (err) { - console.log(`[SOULSEEK-TEST] Connection failed: ${err.message}`); + logger.debug(`[SOULSEEK-TEST] Connection failed: ${err.message}`); return reject(err); } - console.log(`[SOULSEEK-TEST] Connected successfully`); + logger.debug(`[SOULSEEK-TEST] Connected successfully`); // We don't need to keep the connection open for the test resolve(); } @@ -567,14 +639,14 @@ router.post("/test-soulseek", async (req, res) => { isConnected: true, }); } catch (connectError: any) { - console.error(`[SOULSEEK-TEST] Error: ${connectError.message}`); + logger.error(`[SOULSEEK-TEST] Error: ${connectError.message}`); res.status(401).json({ error: "Invalid Soulseek credentials or connection failed", details: connectError.message, }); } } catch (error: any) { - console.error("[SOULSEEK-TEST] Error:", error.message); + logger.error("[SOULSEEK-TEST] Error:", error.message); res.status(500).json({ error: "Failed to test Soulseek connection", details: error.message, @@ -593,22 +665,39 @@ router.post("/test-spotify", async (req, res) => { }); } - // Import spotifyService to test credentials - const { spotifyService } = await import("../services/spotify"); - const result = await spotifyService.testCredentials(clientId, clientSecret); + // Test credentials by trying to get an access token + const axios = require("axios"); + try { + const response = await axios.post( + "https://accounts.spotify.com/api/token", + "grant_type=client_credentials", + { + headers: { + "Content-Type": "application/x-www-form-urlencoded", + 
Authorization: `Basic ${Buffer.from(`${clientId}:${clientSecret}`).toString("base64")}`, + }, + timeout: 10000, + } + ); - if (result.success) { - res.json({ - success: true, - message: "Spotify credentials are valid", - }); - } else { + if (response.data.access_token) { + res.json({ + success: true, + message: "Spotify credentials are valid", + }); + } else { + res.status(401).json({ + error: "Invalid Spotify credentials", + }); + } + } catch (tokenError: any) { res.status(401).json({ - error: result.error || "Invalid Spotify credentials", + error: "Invalid Spotify credentials", + details: tokenError.response?.data?.error_description || tokenError.message, }); } } catch (error: any) { - console.error("Spotify test error:", error.message); + logger.error("Spotify test error:", error.message); res.status(500).json({ error: "Failed to test Spotify credentials", details: error.message, @@ -661,7 +750,7 @@ router.post("/clear-caches", async (req, res) => { ); if (keysToDelete.length > 0) { - console.log( + logger.debug( `[CACHE] Clearing ${ keysToDelete.length } cache entries (excluding ${ @@ -671,7 +760,7 @@ router.post("/clear-caches", async (req, res) => { for (const key of keysToDelete) { await redisClient.del(key); } - console.log( + logger.debug( `[CACHE] Successfully cleared ${keysToDelete.length} cache entries` ); @@ -701,7 +790,7 @@ router.post("/clear-caches", async (req, res) => { }); } } catch (error: any) { - console.error("Clear caches error:", error); + logger.error("Clear caches error:", error); res.status(500).json({ error: "Failed to clear caches", details: error.message, diff --git a/backend/src/routes/webhooks.ts b/backend/src/routes/webhooks.ts index 953829f..a646365 100644 --- a/backend/src/routes/webhooks.ts +++ b/backend/src/routes/webhooks.ts @@ -6,15 +6,26 @@ */ import { Router } from "express"; -import { prisma } from "../utils/db"; import { scanQueue } from "../workers/queues"; -import { discoverWeeklyService } from 
"../services/discoverWeekly"; import { simpleDownloadManager } from "../services/simpleDownloadManager"; import { queueCleaner } from "../jobs/queueCleaner"; import { getSystemSettings } from "../utils/systemSettings"; +import { prisma } from "../utils/db"; +import { logger } from "../utils/logger"; const router = Router(); +// GET /webhooks/lidarr/verify - Webhook verification endpoint +router.get("/lidarr/verify", (req, res) => { + logger.debug("[WEBHOOK] Verification request received"); + res.json({ + status: "ok", + timestamp: new Date().toISOString(), + service: "lidify", + version: process.env.npm_package_version || "unknown", + }); +}); + // POST /webhooks/lidarr - Handle Lidarr webhooks router.post("/lidarr", async (req, res) => { try { @@ -25,7 +36,7 @@ router.post("/lidarr", async (req, res) => { !settings?.lidarrUrl || !settings?.lidarrApiKey ) { - console.log( + logger.debug( `[WEBHOOK] Lidarr webhook received but Lidarr is disabled. Ignoring.` ); return res.status(202).json({ @@ -35,12 +46,27 @@ router.post("/lidarr", async (req, res) => { }); } + // Verify webhook secret if configured + // Note: settings.lidarrWebhookSecret is already decrypted by getSystemSettings() + if (settings.lidarrWebhookSecret) { + const providedSecret = req.headers["x-webhook-secret"] as string; + + if (!providedSecret || providedSecret !== settings.lidarrWebhookSecret) { + logger.debug( + `[WEBHOOK] Lidarr webhook received with invalid or missing secret` + ); + return res.status(401).json({ + error: "Unauthorized - Invalid webhook secret", + }); + } + } + const eventType = req.body.eventType; - console.log(`[WEBHOOK] Lidarr event: ${eventType}`); + logger.debug(`[WEBHOOK] Lidarr event: ${eventType}`); // Log payload in debug mode only (avoid verbose logs in production) if (process.env.DEBUG_WEBHOOKS === "true") { - console.log(` Payload:`, JSON.stringify(req.body, null, 2)); + logger.debug(` Payload:`, JSON.stringify(req.body, null, 2)); } switch (eventType) { @@ -68,16 
+94,16 @@ router.post("/lidarr", async (req, res) => { break; case "Test": - console.log(" Lidarr test webhook received"); + logger.debug(" Lidarr test webhook received"); break; default: - console.log(` Unhandled event: ${eventType}`); + logger.debug(` Unhandled event: ${eventType}`); } res.json({ success: true }); } catch (error: any) { - console.error("Webhook error:", error.message); + logger.error("Webhook error:", error.message); res.status(500).json({ error: "Webhook processing failed" }); } }); @@ -93,12 +119,12 @@ async function handleGrab(payload: any) { const artistName = payload.artist?.name; const lidarrAlbumId = payload.albums?.[0]?.id; - console.log(` Album: ${artistName} - ${albumTitle}`); - console.log(` Download ID: ${downloadId}`); - console.log(` MBID: ${albumMbid}`); + logger.debug(` Album: ${artistName} - ${albumTitle}`); + logger.debug(` Download ID: ${downloadId}`); + logger.debug(` MBID: ${albumMbid}`); if (!downloadId) { - console.log(` Missing downloadId, skipping`); + logger.debug(` Missing downloadId, skipping`); return; } @@ -128,13 +154,13 @@ async function handleDownload(payload: any) { payload.album?.foreignAlbumId || payload.albums?.[0]?.foreignAlbumId; const lidarrAlbumId = payload.album?.id || payload.albums?.[0]?.id; - console.log(` Album: ${artistName} - ${albumTitle}`); - console.log(` Download ID: ${downloadId}`); - console.log(` Album MBID: ${albumMbid}`); - console.log(` Lidarr Album ID: ${lidarrAlbumId}`); + logger.debug(` Album: ${artistName} - ${albumTitle}`); + logger.debug(` Download ID: ${downloadId}`); + logger.debug(` Album MBID: ${albumMbid}`); + logger.debug(` Lidarr Album ID: ${lidarrAlbumId}`); if (!downloadId) { - console.log(` Missing downloadId, skipping`); + logger.debug(` Missing downloadId, skipping`); return; } @@ -148,36 +174,30 @@ async function handleDownload(payload: any) { ); if (result.jobId) { - // Check if this is part of a download batch (artist download) - if (result.downloadBatchId) { - // 
Check if all jobs in the batch are complete - const batchComplete = await checkDownloadBatchComplete( - result.downloadBatchId - ); - if (batchComplete) { - console.log( - ` All albums in batch complete, triggering library scan...` - ); - await scanQueue.add("scan", { - type: "full", - source: "lidarr-import-batch", - }); - } else { - console.log(` Batch not complete, skipping scan`); - } - } else if (!result.batchId) { - // Single album download (not part of discovery batch) - console.log(` Triggering library scan...`); - await scanQueue.add("scan", { - type: "full", - source: "lidarr-import", - }); - } - // If part of discovery batch, the download manager already called checkBatchCompletion + // Find the download job that triggered this webhook to get userId + const downloadJob = await prisma.downloadJob.findUnique({ + where: { id: result.jobId }, + select: { userId: true, id: true }, + }); + + // Trigger scan immediately for this album (incremental scan with enrichment data) + // Don't wait for batch completion - enrichment should happen per-album + logger.debug( + ` Triggering incremental scan for: ${artistName} - ${albumTitle}` + ); + await scanQueue.add("scan", { + userId: downloadJob?.userId || null, + source: "lidarr-webhook", + artistName: artistName, + albumMbid: albumMbid, + downloadId: result.jobId, + }); + + // Discovery batch completion (for playlist building) is handled by download manager } else { // No job found - this might be an external download not initiated by us // Still trigger a scan to pick up the new music - console.log(` No matching job, triggering scan anyway...`); + logger.debug(` No matching job, triggering scan anyway...`); await scanQueue.add("scan", { type: "full", source: "lidarr-import-external", @@ -185,26 +205,6 @@ async function handleDownload(payload: any) { } } -/** - * Check if all jobs in a download batch are complete - */ -async function checkDownloadBatchComplete(batchId: string): Promise { - const pendingJobs = await 
prisma.downloadJob.count({ - where: { - metadata: { - path: ["batchId"], - equals: batchId, - }, - status: { in: ["pending", "processing"] }, - }, - }); - - console.log( - ` Batch ${batchId}: ${pendingJobs} pending/processing jobs remaining` - ); - return pendingJobs === 0; -} - /** * Handle import failure with automatic retry */ @@ -215,12 +215,12 @@ async function handleImportFailure(payload: any) { const albumTitle = payload.album?.title || payload.release?.title; const reason = payload.message || "Import failed"; - console.log(` Album: ${albumTitle}`); - console.log(` Download ID: ${downloadId}`); - console.log(` Reason: ${reason}`); + logger.debug(` Album: ${albumTitle}`); + logger.debug(` Download ID: ${downloadId}`); + logger.debug(` Reason: ${reason}`); if (!downloadId) { - console.log(` Missing downloadId, skipping`); + logger.debug(` Missing downloadId, skipping`); return; } diff --git a/backend/src/services/acquisitionService.ts b/backend/src/services/acquisitionService.ts new file mode 100644 index 0000000..8ad518f --- /dev/null +++ b/backend/src/services/acquisitionService.ts @@ -0,0 +1,850 @@ +/** + * Unified Acquisition Service + * + * Consolidates album/track acquisition logic from Discovery Weekly and Playlist Import. + * Handles download source selection, behavior matrix routing, and job tracking. 
+ * + * Phase 2.1: Initial implementation + * - Behavior matrix logic for primary/fallback source selection + * - Soulseek album acquisition (track list → batch download) + * - Lidarr album acquisition (webhook-based completion) + * - DownloadJob management with context-based tracking + */ + +import { logger } from "../utils/logger"; +import { prisma } from "../utils/db"; +import { getSystemSettings } from "../utils/systemSettings"; +import { soulseekService } from "./soulseek"; +import { simpleDownloadManager } from "./simpleDownloadManager"; +import { musicBrainzService } from "./musicbrainz"; +import { lastFmService } from "./lastfm"; +import { AcquisitionError, AcquisitionErrorType } from "./lidarr"; +import PQueue from "p-queue"; + +// ============================================ +// TYPE DEFINITIONS +// ============================================ + +/** + * Context for tracking acquisition origin + * Used to link download jobs to their source (Discovery batch or Spotify import) + */ +export interface AcquisitionContext { + userId: string; + discoveryBatchId?: string; + spotifyImportJobId?: string; + existingJobId?: string; +} + +/** + * Request to acquire an album + */ +export interface AlbumAcquisitionRequest { + albumTitle: string; + artistName: string; + mbid?: string; + lastfmUrl?: string; + requestedTracks?: Array<{ title: string; position?: number }>; +} + +/** + * Request to acquire individual tracks (for Unknown Album case) + */ +export interface TrackAcquisitionRequest { + trackTitle: string; + artistName: string; + albumTitle?: string; +} + +/** + * Result of an acquisition attempt + */ +export interface AcquisitionResult { + success: boolean; + downloadJobId?: number; + source?: "soulseek" | "lidarr"; + error?: string; + errorType?: AcquisitionErrorType; + isRecoverable?: boolean; + tracksDownloaded?: number; + tracksTotal?: number; + correlationId?: string; +} + +/** + * Service availability check result + */ +interface ServiceAvailability { + 
lidarrAvailable: boolean; + soulseekAvailable: boolean; +} + +/** + * Download behavior matrix configuration + */ +interface DownloadBehavior { + hasPrimarySource: boolean; + primarySource: "soulseek" | "lidarr" | null; + hasFallbackSource: boolean; + fallbackSource: "soulseek" | "lidarr" | null; +} + +// ============================================ +// ACQUISITION SERVICE +// ============================================ + +class AcquisitionService { + private albumQueue: PQueue; + + constructor() { + // Initialize album queue with concurrency of 2 (configurable) + this.albumQueue = new PQueue({ concurrency: 2 }); + logger.debug( + "[Acquisition] Initialized album queue with concurrency=2" + ); + } + + /** + * Get download behavior configuration (settings + service availability) + * Auto-detects and selects download source based on actual availability + */ + private async getDownloadBehavior(): Promise { + const settings = await getSystemSettings(); + + // Get download source settings + const downloadSource = settings?.downloadSource || "soulseek"; + const primaryFailureFallback = + settings?.primaryFailureFallback || "none"; + + // Determine actual availability + const hasSoulseek = await soulseekService.isAvailable(); + const hasLidarr = !!( + settings?.lidarrEnabled && + settings?.lidarrUrl && + settings?.lidarrApiKey + ); + + // Case 1: No sources available + if (!hasSoulseek && !hasLidarr) { + logger.debug( + "[Acquisition] Available sources: Lidarr=false, Soulseek=false" + ); + logger.error("[Acquisition] No download sources configured"); + return { + hasPrimarySource: false, + primarySource: null, + hasFallbackSource: false, + fallbackSource: null, + }; + } + + // Case 2: Only one source available - use it regardless of preference + if (hasSoulseek && !hasLidarr) { + logger.debug( + "[Acquisition] Available sources: Lidarr=false, Soulseek=true" + ); + logger.debug( + "[Acquisition] Using Soulseek as primary source (only source available)" + ); + 
logger.debug( + "[Acquisition] No fallback configured (only one source available)" + ); + return { + hasPrimarySource: true, + primarySource: "soulseek", + hasFallbackSource: false, + fallbackSource: null, + }; + } + + if (hasLidarr && !hasSoulseek) { + logger.debug( + "[Acquisition] Available sources: Lidarr=true, Soulseek=false" + ); + logger.debug( + "[Acquisition] Using Lidarr as primary source (only source available)" + ); + logger.debug( + "[Acquisition] No fallback configured (only one source available)" + ); + return { + hasPrimarySource: true, + primarySource: "lidarr", + hasFallbackSource: false, + fallbackSource: null, + }; + } + + // Case 3: Both available - respect user preference for primary + const userPrimary = downloadSource; // "soulseek" or "lidarr" + const alternative = userPrimary === "soulseek" ? "lidarr" : "soulseek"; + + // Auto-enable fallback if both sources are configured and no explicit setting + let useFallback = + primaryFailureFallback !== "none" && + primaryFailureFallback === alternative; + + // Auto-fallback: If both sources available and no explicit fallback set, enable it + if (!useFallback && primaryFailureFallback === "none") { + useFallback = true; + logger.debug( + `[Acquisition] Auto-enabled fallback: ${alternative} (both sources configured)` + ); + } + + logger.debug( + "[Acquisition] Available sources: Lidarr=true, Soulseek=true" + ); + logger.debug( + `[Acquisition] Using ${userPrimary} as primary source (user preference)` + ); + logger.debug( + `[Acquisition] Fallback configured: ${ + useFallback ? alternative : "none" + }` + ); + + return { + hasPrimarySource: true, + primarySource: userPrimary, + hasFallbackSource: useFallback, + fallbackSource: useFallback ? 
alternative : null, + }; + } + + /** + * Update download job with source-specific status text + * Stored in metadata for frontend display + */ + private async updateJobStatusText( + jobId: string, + source: "lidarr" | "soulseek", + attemptNumber: number + ): Promise { + const sourceLabel = source.charAt(0).toUpperCase() + source.slice(1); + const statusText = `${sourceLabel} #${attemptNumber}`; + + const job = await prisma.downloadJob.findUnique({ + where: { id: jobId }, + select: { metadata: true }, + }); + const existingMetadata = (job?.metadata as any) || {}; + + await prisma.downloadJob.update({ + where: { id: jobId }, + data: { + metadata: { + ...existingMetadata, + currentSource: source, + lidarrAttempts: + source === "lidarr" + ? attemptNumber + : existingMetadata.lidarrAttempts || 0, + soulseekAttempts: + source === "soulseek" + ? attemptNumber + : existingMetadata.soulseekAttempts || 0, + statusText, + }, + }, + }); + + logger.debug(`[Acquisition] Updated job ${jobId}: ${statusText}`); + } + + /** + * Acquire an album using the configured behavior matrix + * Routes to Soulseek or Lidarr based on settings, with fallback support + * Queued to enable parallel album acquisition + * + * @param request - Album to acquire + * @param context - Tracking context (userId, batchId, etc.) 
+ * @returns Acquisition result + */ + async acquireAlbum( + request: AlbumAcquisitionRequest, + context: AcquisitionContext + ): Promise { + return this.albumQueue.add(() => + this.acquireAlbumInternal(request, context) + ); + } + + /** + * Internal album acquisition logic (called via queue) + */ + private async acquireAlbumInternal( + request: AlbumAcquisitionRequest, + context: AcquisitionContext + ): Promise { + logger.debug( + `\n[Acquisition] Acquiring album: ${request.artistName} - ${request.albumTitle} (queue: ${this.albumQueue.size} pending, ${this.albumQueue.pending} active)` + ); + + // Verify artist name before acquisition + try { + const correction = await lastFmService.getArtistCorrection( + request.artistName + ); + if (correction?.corrected) { + logger.debug( + `[Acquisition] Artist corrected: "${request.artistName}" → "${correction.canonicalName}"` + ); + request = { ...request, artistName: correction.canonicalName }; + } + } catch (error) { + logger.warn( + `[Acquisition] Artist correction failed for "${request.artistName}":`, + error + ); + } + + // Get download behavior configuration + const behavior = await this.getDownloadBehavior(); + + // Validate at least one source is available + if (!behavior.hasPrimarySource) { + const error = + "No download sources available (neither Soulseek nor Lidarr configured)"; + logger.error(`[Acquisition] ${error}`); + return { success: false, error }; + } + + // Try primary source first + let result: AcquisitionResult; + + if (behavior.primarySource === "soulseek") { + logger.debug(`[Acquisition] Trying primary: Soulseek`); + result = await this.acquireAlbumViaSoulseek(request, context); + + // Fallback to Lidarr if Soulseek fails and fallback is configured + if (!result.success) { + logger.debug( + `[Acquisition] Soulseek failed: ${result.error || "unknown error"}` + ); + logger.debug( + `[Acquisition] Fallback available: hasFallback=${behavior.hasFallbackSource}, source=${behavior.fallbackSource}` + ); + + if 
( + behavior.hasFallbackSource && + behavior.fallbackSource === "lidarr" + ) { + logger.debug( + `[Acquisition] Attempting Lidarr fallback...` + ); + result = await this.acquireAlbumViaLidarr(request, context); + } else { + logger.debug( + `[Acquisition] No fallback configured or fallback not Lidarr` + ); + } + } + } else if (behavior.primarySource === "lidarr") { + logger.debug(`[Acquisition] Trying primary: Lidarr`); + result = await this.acquireAlbumViaLidarr(request, context); + + // Fallback to Soulseek if Lidarr fails and fallback is configured + if (!result.success) { + logger.debug( + `[Acquisition] Lidarr failed: ${result.error || "unknown error"}` + ); + logger.debug( + `[Acquisition] Fallback available: hasFallback=${behavior.hasFallbackSource}, source=${behavior.fallbackSource}` + ); + + if ( + behavior.hasFallbackSource && + behavior.fallbackSource === "soulseek" + ) { + logger.debug( + `[Acquisition] Attempting Soulseek fallback...` + ); + result = await this.acquireAlbumViaSoulseek(request, context); + } else { + logger.debug( + `[Acquisition] No fallback configured or fallback not Soulseek` + ); + } + } + } else { + // This should never happen due to validation above + const error = "No primary source configured"; + logger.error(`[Acquisition] ${error}`); + return { success: false, error }; + } + + return result; + } + + /** + * Acquire individual tracks via Soulseek (for Unknown Album case) + * Batch downloads tracks without album MBID + * + * @param requests - Tracks to acquire + * @param context - Tracking context + * @returns Array of acquisition results + */ + async acquireTracks( + requests: TrackAcquisitionRequest[], + context: AcquisitionContext + ): Promise { + logger.debug( + `\n[Acquisition] Acquiring ${requests.length} individual tracks via Soulseek` + ); + + // Check Soulseek availability + const soulseekAvailable = await soulseekService.isAvailable(); + if (!soulseekAvailable) { + logger.error( + `[Acquisition] Soulseek not available 
for track downloads` + ); + return requests.map(() => ({ + success: false, + error: "Soulseek not configured", + })); + } + + // Get music path + const settings = await getSystemSettings(); + const musicPath = settings?.musicPath; + if (!musicPath) { + logger.error(`[Acquisition] Music path not configured`); + return requests.map(() => ({ + success: false, + error: "Music path not configured", + })); + } + + // Prepare tracks for batch download + const tracksToDownload = requests.map((req) => ({ + artist: req.artistName, + title: req.trackTitle, + album: req.albumTitle || "Unknown Album", + })); + + try { + // Use Soulseek batch download + const batchResult = await soulseekService.searchAndDownloadBatch( + tracksToDownload, + musicPath, + settings?.soulseekConcurrentDownloads || 4 // concurrency + ); + + logger.debug( + `[Acquisition] Batch result: ${batchResult.successful}/${requests.length} tracks downloaded` + ); + + // Create individual results for each track + const results: AcquisitionResult[] = requests.map((req, index) => { + // Check if this track was in the successful list + // Note: We don't have per-track success info from batch, so we estimate + const success = index < batchResult.successful; + return { + success, + source: "soulseek" as const, + tracksDownloaded: success ? 1 : 0, + tracksTotal: 1, + error: success + ? 
undefined + : batchResult.errors[index] || "Download failed", + }; + }); + + return results; + } catch (error: any) { + logger.error( + `[Acquisition] Batch track download error: ${error.message}` + ); + return requests.map(() => ({ + success: false, + error: error.message, + })); + } + } + + /** + * Acquire album via Soulseek (track-by-track download) + * Gets track list from MusicBrainz or Last.fm, then batch downloads + * Marks job as completed immediately (no webhook needed) + * + * @param request - Album to acquire + * @param context - Tracking context + * @returns Acquisition result + */ + private async acquireAlbumViaSoulseek( + request: AlbumAcquisitionRequest, + context: AcquisitionContext + ): Promise { + logger.debug( + `[Acquisition/Soulseek] Downloading: ${request.artistName} - ${request.albumTitle}` + ); + + // Get music path + const settings = await getSystemSettings(); + const musicPath = settings?.musicPath; + if (!musicPath) { + return { success: false, error: "Music path not configured" }; + } + + if (!request.mbid) { + return { + success: false, + error: "Album MBID required for Soulseek download", + }; + } + + let job: any; + try { + // Create download job at start for tracking + job = await this.createDownloadJob(request, context); + + // Calculate attempt number (existing soulseek attempts + 1) + const jobMetadata = (job.metadata as any) || {}; + const soulseekAttempts = (jobMetadata.soulseekAttempts || 0) + 1; + await this.updateJobStatusText( + job.id, + "soulseek", + soulseekAttempts + ); + + let tracks: Array<{ title: string; position?: number }>; + + // If specific tracks requested, use those instead of full album + if (request.requestedTracks && request.requestedTracks.length > 0) { + tracks = request.requestedTracks; + logger.debug( + `[Acquisition/Soulseek] Using ${tracks.length} requested tracks (not full album)` + ); + } else { + // Strategy 1: Get track list from MusicBrainz + tracks = await 
musicBrainzService.getAlbumTracks(request.mbid); + + // Strategy 2: Fallback to Last.fm (always try when MusicBrainz fails) + if (!tracks || tracks.length === 0) { + logger.debug( + `[Acquisition/Soulseek] MusicBrainz has no tracks, trying Last.fm` + ); + + try { + const albumInfo = await lastFmService.getAlbumInfo( + request.artistName, + request.albumTitle + ); + const lastFmTracks = albumInfo?.tracks?.track || []; + + if (Array.isArray(lastFmTracks) && lastFmTracks.length > 0) { + tracks = lastFmTracks.map((t: any) => ({ + title: t.name || t.title, + position: t["@attr"]?.rank + ? parseInt(t["@attr"].rank) + : undefined, + })); + logger.debug( + `[Acquisition/Soulseek] Got ${tracks.length} tracks from Last.fm` + ); + } + } catch (lastfmError: any) { + logger.warn( + `[Acquisition/Soulseek] Last.fm fallback failed: ${lastfmError.message}` + ); + } + } + + if (!tracks || tracks.length === 0) { + // Mark job as failed + await this.updateJobStatus( + job.id, + "failed", + "Could not get track list from MusicBrainz or Last.fm" + ); + return { + success: false, + error: "Could not get track list from MusicBrainz or Last.fm", + }; + } + + logger.debug( + `[Acquisition/Soulseek] Found ${tracks.length} tracks for album` + ); + } + + // Prepare tracks for batch download + const tracksToDownload = tracks.map((track) => ({ + artist: request.artistName, + title: track.title, + album: request.albumTitle, + })); + + // Use Soulseek batch download (parallel with concurrency limit) + const batchResult = await soulseekService.searchAndDownloadBatch( + tracksToDownload, + musicPath, + settings?.soulseekConcurrentDownloads || 4 // concurrency + ); + + if (batchResult.successful === 0) { + // Mark job as failed + await this.updateJobStatus( + job.id, + "failed", + `No tracks found on Soulseek (searched ${tracks.length} tracks)` + ); + return { + success: false, + tracksTotal: tracks.length, + downloadJobId: parseInt(job.id), + error: `No tracks found on Soulseek (searched 
${tracks.length} tracks)`, + }; + } + + // Success threshold: at least 50% of tracks + const successThreshold = Math.ceil(tracks.length * 0.5); + const isSuccess = batchResult.successful >= successThreshold; + + logger.debug( + `[Acquisition/Soulseek] Downloaded ${batchResult.successful}/${tracks.length} tracks (threshold: ${successThreshold})` + ); + + // Mark job as completed immediately (Soulseek doesn't use webhooks) + await this.updateJobStatus( + job.id, + isSuccess ? "completed" : "failed", + isSuccess + ? undefined + : `Only ${batchResult.successful}/${tracks.length} tracks found` + ); + + // Update job metadata with track counts + await prisma.downloadJob.update({ + where: { id: job.id }, + data: { + metadata: { + ...job.metadata, + tracksDownloaded: batchResult.successful, + tracksTotal: tracks.length, + }, + }, + }); + + return { + success: isSuccess, + source: "soulseek", + downloadJobId: parseInt(job.id), + tracksDownloaded: batchResult.successful, + tracksTotal: tracks.length, + error: isSuccess + ? 
undefined + : `Only ${batchResult.successful}/${tracks.length} tracks found`, + }; + } catch (error: any) { + logger.error(`[Acquisition/Soulseek] Error: ${error.message}`); + // Update job status if job was created + if (job) { + await this.updateJobStatus( + job.id, + "failed", + error.message + ).catch((e) => + logger.error( + `[Acquisition/Soulseek] Failed to update job status: ${e.message}` + ) + ); + } + return { success: false, error: error.message }; + } + } + + /** + * Acquire album via Lidarr (full album download) + * Creates download job and waits for webhook completion + * + * @param request - Album to acquire + * @param context - Tracking context + * @returns Acquisition result + */ + private async acquireAlbumViaLidarr( + request: AlbumAcquisitionRequest, + context: AcquisitionContext + ): Promise { + logger.debug( + `[Acquisition/Lidarr] Downloading: ${request.artistName} - ${request.albumTitle}` + ); + + if (!request.mbid) { + return { + success: false, + error: "Album MBID required for Lidarr download", + }; + } + + let job: any; + try { + // Create download job + job = await this.createDownloadJob(request, context); + + // Calculate attempt number (existing lidarr attempts + 1) + const jobMetadata = (job.metadata as any) || {}; + const lidarrAttempts = (jobMetadata.lidarrAttempts || 0) + 1; + await this.updateJobStatusText(job.id, "lidarr", lidarrAttempts); + + // Start Lidarr download + const isDiscovery = !!context.discoveryBatchId; + const result = await simpleDownloadManager.startDownload( + job.id, + request.artistName, + request.albumTitle, + request.mbid, + context.userId, + isDiscovery + ); + + if (result.success) { + logger.debug( + `[Acquisition/Lidarr] Download started (correlation: ${result.correlationId})` + ); + + return { + success: true, + source: "lidarr", + downloadJobId: parseInt(job.id), + correlationId: result.correlationId, + }; + } else { + logger.error( + `[Acquisition/Lidarr] Failed to start: ${result.error}` + ); + + // 
Mark job as failed + await this.updateJobStatus(job.id, "failed", result.error); + + // Return structured error info for fallback logic + return { + success: false, + error: result.error, + errorType: result.errorType, + isRecoverable: result.isRecoverable, + }; + } + } catch (error: any) { + logger.error(`[Acquisition/Lidarr] Error: ${error.message}`); + // Update job status if job was created + if (job) { + await this.updateJobStatus( + job.id, + "failed", + error.message + ).catch((e) => + logger.error( + `[Acquisition/Lidarr] Failed to update job status: ${e.message}` + ) + ); + } + return { success: false, error: error.message }; + } + } + + /** + * Create a DownloadJob for tracking acquisition + * Links to Discovery batch or Spotify import job as appropriate + * + * @param request - Album request + * @param context - Tracking context + * @returns Created download job + */ + private async createDownloadJob( + request: AlbumAcquisitionRequest, + context: AcquisitionContext + ): Promise { + // Check for existing job first + if (context.existingJobId) { + logger.debug( + `[Acquisition] Using existing download job: ${context.existingJobId}` + ); + return { id: context.existingJobId }; + } + + // Validate userId before creating download job to prevent foreign key constraint violations + if (!context.userId || typeof context.userId !== 'string' || context.userId === 'NaN' || context.userId === 'undefined' || context.userId === 'null') { + logger.error( + `[Acquisition] Invalid userId in context: ${JSON.stringify({ + userId: context.userId, + typeofUserId: typeof context.userId, + albumTitle: request.albumTitle, + artistName: request.artistName + })}` + ); + throw new Error(`Invalid userId in acquisition context: ${context.userId}`); + } + + const jobData: any = { + userId: context.userId, + subject: `${request.artistName} - ${request.albumTitle}`, + type: "album", + targetMbid: request.mbid || null, + status: "pending", + metadata: { + artistName: 
request.artistName, + albumTitle: request.albumTitle, + albumMbid: request.mbid, + }, + }; + + // Add context-based tracking + if (context.discoveryBatchId) { + jobData.discoveryBatchId = context.discoveryBatchId; + jobData.metadata.downloadType = "discovery"; + } + + if (context.spotifyImportJobId) { + jobData.metadata.spotifyImportJobId = context.spotifyImportJobId; + jobData.metadata.downloadType = "spotify_import"; + } + + const job = await prisma.downloadJob.create({ + data: jobData, + }); + + logger.debug( + `[Acquisition] Created download job: ${job.id} (type: ${ + jobData.metadata.downloadType || "library" + })` + ); + + return job; + } + + /** + * Update download job status + * + * @param jobId - Job ID to update + * @param status - New status + * @param error - Optional error message + */ + private async updateJobStatus( + jobId: string, + status: string, + error?: string + ): Promise { + await prisma.downloadJob.update({ + where: { id: jobId }, + data: { + status, + error: error || null, + completedAt: + status === "completed" || status === "failed" + ? new Date() + : undefined, + }, + }); + + logger.debug( + `[Acquisition] Updated job ${jobId}: status=${status}${ + error ? 
`, error=${error}` : "" + }` + ); + } +} + +// Export singleton instance +export const acquisitionService = new AcquisitionService(); diff --git a/backend/src/services/audioAnalysisCleanup.ts b/backend/src/services/audioAnalysisCleanup.ts new file mode 100644 index 0000000..d6bd173 --- /dev/null +++ b/backend/src/services/audioAnalysisCleanup.ts @@ -0,0 +1,232 @@ +import { prisma } from "../utils/db"; +import { logger } from "../utils/logger"; +import { enrichmentFailureService } from "./enrichmentFailureService"; + +const STALE_THRESHOLD_MINUTES = 5; +const MAX_RETRIES = 3; +const CIRCUIT_BREAKER_THRESHOLD = 30; // Increased from 10 to handle batch operations +const CIRCUIT_BREAKER_WINDOW_MS = 5 * 60 * 1000; // 5 minutes + +type CircuitState = 'closed' | 'open' | 'half-open'; + +class AudioAnalysisCleanupService { + private state: CircuitState = 'closed'; + private failureCount = 0; + private lastFailureTime: Date | null = null; + + /** + * Check if we should attempt to transition from open to half-open + */ + private shouldAttemptReset(): boolean { + if (!this.lastFailureTime) return false; + const timeSinceFailure = Date.now() - this.lastFailureTime.getTime(); + return timeSinceFailure >= CIRCUIT_BREAKER_WINDOW_MS; + } + + /** + * Handle successful operation - close circuit if in half-open state + */ + private onSuccess(): void { + if (this.state === 'half-open') { + logger.info( + `[AudioAnalysisCleanup] Circuit breaker CLOSED - recovery successful after ${this.failureCount} failures` + ); + this.state = 'closed'; + this.failureCount = 0; + this.lastFailureTime = null; + } else if (this.state === 'closed' && this.failureCount > 0) { + // Reset failure counter on success while closed + logger.debug( + "[AudioAnalysisCleanup] Resetting failure counter on success" + ); + this.failureCount = 0; + this.lastFailureTime = null; + } + } + + /** + * Handle failed operation - update state and counts + */ + private onFailure(resetCount: number, permanentlyFailedCount: 
number): void { + const totalFailures = resetCount + permanentlyFailedCount; + this.failureCount += totalFailures; + this.lastFailureTime = new Date(); + + if (this.state === 'half-open') { + // Failed during half-open - reopen circuit + this.state = 'open'; + logger.warn( + `[AudioAnalysisCleanup] Circuit breaker REOPENED - recovery attempt failed (${this.failureCount} total failures)` + ); + } else if (this.failureCount >= CIRCUIT_BREAKER_THRESHOLD) { + // Exceeded threshold - open circuit + this.state = 'open'; + logger.warn( + `[AudioAnalysisCleanup] Circuit breaker OPEN - ${this.failureCount} failures in window. ` + + `Pausing audio analysis queuing until analyzer shows signs of life.` + ); + } + } + + /** + * Check if circuit breaker is open (too many consecutive failures) + * Automatically transitions to half-open after cooldown period + */ + isCircuitOpen(): boolean { + if (this.state === 'open' && this.shouldAttemptReset()) { + this.state = 'half-open'; + logger.info( + `[AudioAnalysisCleanup] Circuit breaker HALF-OPEN - attempting recovery after ${ + CIRCUIT_BREAKER_WINDOW_MS / 60000 + } minute cooldown` + ); + } + return this.state === 'open'; + } + + /** + * Record success for external callers (maintains backward compatibility) + */ + recordSuccess(): void { + this.onSuccess(); + } + + /** + * Clean up tracks stuck in "processing" state + * Returns number of tracks reset and permanently failed + */ + async cleanupStaleProcessing(): Promise<{ + reset: number; + permanentlyFailed: number; + }> { + const cutoff = new Date( + Date.now() - STALE_THRESHOLD_MINUTES * 60 * 1000 + ); + + // Find tracks stuck in processing + const staleTracks = await prisma.track.findMany({ + where: { + analysisStatus: "processing", + OR: [ + { analysisStartedAt: { lt: cutoff } }, + { + analysisStartedAt: null, + updatedAt: { lt: cutoff }, + }, + ], + }, + include: { + album: { + include: { + artist: { select: { name: true } }, + }, + }, + }, + }); + + if (staleTracks.length === 
0) { + return { reset: 0, permanentlyFailed: 0 }; + } + + logger.debug( + `[AudioAnalysisCleanup] Found ${staleTracks.length} stale tracks (processing > ${STALE_THRESHOLD_MINUTES} min)` + ); + + let resetCount = 0; + let permanentlyFailedCount = 0; + + for (const track of staleTracks) { + const newRetryCount = (track.analysisRetryCount || 0) + 1; + const trackName = `${track.album.artist.name} - ${track.title}`; + + if (newRetryCount >= MAX_RETRIES) { + // Permanently failed - mark as failed and record + await prisma.track.update({ + where: { id: track.id }, + data: { + analysisStatus: "failed", + analysisError: `Exceeded ${MAX_RETRIES} retry attempts (stale processing)`, + analysisRetryCount: newRetryCount, + analysisStartedAt: null, + }, + }); + + // Record in EnrichmentFailure for user visibility + await enrichmentFailureService.recordFailure({ + entityType: "audio", + entityId: track.id, + entityName: trackName, + errorMessage: `Analysis timed out ${MAX_RETRIES} times - track may be corrupted or unsupported`, + errorCode: "MAX_RETRIES_EXCEEDED", + metadata: { + filePath: track.filePath, + retryCount: newRetryCount, + }, + }); + + logger.warn( + `[AudioAnalysisCleanup] Permanently failed: ${trackName}` + ); + permanentlyFailedCount++; + } else { + // Reset to pending for retry + await prisma.track.update({ + where: { id: track.id }, + data: { + analysisStatus: "pending", + analysisStartedAt: null, + analysisRetryCount: newRetryCount, + analysisError: `Reset after stale processing (attempt ${newRetryCount}/${MAX_RETRIES})`, + }, + }); + + logger.debug( + `[AudioAnalysisCleanup] Reset for retry (${newRetryCount}/${MAX_RETRIES}): ${trackName}` + ); + resetCount++; + } + } + + // Update circuit breaker state + if (resetCount > 0 || permanentlyFailedCount > 0) { + this.onFailure(resetCount, permanentlyFailedCount); + logger.debug( + `[AudioAnalysisCleanup] Cleanup complete: ${resetCount} reset, ${permanentlyFailedCount} permanently failed` + ); + } + + return { 
reset: resetCount, permanentlyFailed: permanentlyFailedCount }; + } + + /** + * Get current analysis statistics + */ + async getStats(): Promise<{ + pending: number; + processing: number; + completed: number; + failed: number; + circuitOpen: boolean; + circuitState: CircuitState; + failureCount: number; + }> { + const [pending, processing, completed, failed] = await Promise.all([ + prisma.track.count({ where: { analysisStatus: "pending" } }), + prisma.track.count({ where: { analysisStatus: "processing" } }), + prisma.track.count({ where: { analysisStatus: "completed" } }), + prisma.track.count({ where: { analysisStatus: "failed" } }), + ]); + + return { + pending, + processing, + completed, + failed, + circuitOpen: this.state === 'open', + circuitState: this.state, + failureCount: this.failureCount, + }; + } +} + +export const audioAnalysisCleanupService = new AudioAnalysisCleanupService(); diff --git a/backend/src/services/audioStreaming.ts b/backend/src/services/audioStreaming.ts index 578c650..56b8183 100644 --- a/backend/src/services/audioStreaming.ts +++ b/backend/src/services/audioStreaming.ts @@ -1,4 +1,5 @@ import * as fs from "fs"; +import { logger } from "../utils/logger"; import * as path from "path"; import * as crypto from "crypto"; import { prisma } from "../utils/db"; @@ -50,7 +51,7 @@ export class AudioStreamingService { // Start cache eviction timer (every 6 hours) this.evictionInterval = setInterval(() => { this.evictCache(this.transcodeCacheMaxGb).catch((err) => { - console.error("Cache eviction failed:", err); + logger.error("Cache eviction failed:", err); }); }, 6 * 60 * 60 * 1000); } @@ -64,12 +65,12 @@ export class AudioStreamingService { sourceModified: Date, sourceAbsolutePath: string ): Promise { - console.log(`[AudioStreaming] Request: trackId=${trackId}, quality=${quality}, source=${path.basename(sourceAbsolutePath)}`); + logger.debug(`[AudioStreaming] Request: trackId=${trackId}, quality=${quality}, 
source=${path.basename(sourceAbsolutePath)}`); // If original quality requested, return source file if (quality === "original") { const mimeType = this.getMimeType(sourceAbsolutePath); - console.log(`[AudioStreaming] Serving original: mimeType=${mimeType}`); + logger.debug(`[AudioStreaming] Serving original: mimeType=${mimeType}`); return { filePath: sourceAbsolutePath, mimeType, @@ -84,7 +85,7 @@ export class AudioStreamingService { ); if (cachedPath) { - console.log( + logger.debug( `[STREAM] Using cached transcode: ${quality} (${cachedPath})` ); return { @@ -103,7 +104,7 @@ export class AudioStreamingService { : null; if (sourceBitrate && sourceBitrate <= targetBitrate) { - console.log( + logger.debug( `[STREAM] Source bitrate (${sourceBitrate}kbps) <= target (${targetBitrate}kbps), serving original` ); return { @@ -112,7 +113,7 @@ export class AudioStreamingService { }; } } catch (err) { - console.warn( + logger.warn( `[STREAM] Failed to read source metadata, will transcode anyway:`, err ); @@ -122,7 +123,7 @@ export class AudioStreamingService { // Need to transcode - check cache size first const currentSize = await this.getCacheSize(); if (currentSize > this.transcodeCacheMaxGb * 0.9) { - console.log( + logger.debug( `[STREAM] Cache near full (${currentSize.toFixed( 2 )}GB), evicting to 80%...` @@ -131,7 +132,7 @@ export class AudioStreamingService { } // Transcode to cache - console.log( + logger.debug( `[STREAM] Transcoding to ${quality} quality: ${sourceAbsolutePath}` ); const transcodedPath = await this.transcodeToCache( @@ -166,7 +167,7 @@ export class AudioStreamingService { // Invalidate if source file was modified after transcode was created if (cached.sourceModified < sourceModified) { - console.log( + logger.debug( `[STREAM] Cache stale for track ${trackId}, removing...` ); await prisma.transcodedFile.delete({ where: { id: cached.id } }); @@ -191,7 +192,7 @@ export class AudioStreamingService { // Verify file exists if (!fs.existsSync(fullPath)) { - 
console.log(`[STREAM] Cache file missing: ${fullPath}`); + logger.debug(`[STREAM] Cache file missing: ${fullPath}`); await prisma.transcodedFile.delete({ where: { id: cached.id } }); return null; } @@ -274,7 +275,7 @@ export class AudioStreamingService { }, }); - console.log( + logger.debug( `[STREAM] Transcode complete: ${cacheFileName} (${( stats.size / 1024 / @@ -322,13 +323,13 @@ export class AudioStreamingService { * Evict cache using LRU until size is below target */ async evictCache(targetGb: number): Promise { - console.log(`[CACHE] Starting eviction, target: ${targetGb}GB`); + logger.debug(`[CACHE] Starting eviction, target: ${targetGb}GB`); let currentSize = await this.getCacheSize(); - console.log(`[CACHE] Current size: ${currentSize.toFixed(2)}GB`); + logger.debug(`[CACHE] Current size: ${currentSize.toFixed(2)}GB`); if (currentSize <= targetGb) { - console.log("[CACHE] Below target, no eviction needed"); + logger.debug("[CACHE] Below target, no eviction needed"); return; } @@ -346,7 +347,7 @@ export class AudioStreamingService { try { await fs.promises.unlink(fullPath); } catch (err) { - console.warn(`[CACHE] Failed to delete ${fullPath}:`, err); + logger.warn(`[CACHE] Failed to delete ${fullPath}:`, err); } // Delete from database @@ -356,7 +357,7 @@ export class AudioStreamingService { evicted++; } - console.log( + logger.debug( `[CACHE] Evicted ${evicted} files, new size: ${currentSize.toFixed( 2 )}GB` diff --git a/backend/src/services/audiobookCache.ts b/backend/src/services/audiobookCache.ts index 9656d16..1478378 100644 --- a/backend/src/services/audiobookCache.ts +++ b/backend/src/services/audiobookCache.ts @@ -1,4 +1,5 @@ import { audiobookshelfService } from "./audiobookshelf"; +import { logger } from "../utils/logger"; import { prisma } from "../utils/db"; import fs from "fs/promises"; import path from "path"; @@ -19,6 +20,7 @@ interface SyncResult { export class AudiobookCacheService { private coverCacheDir: string; + private 
coverCacheAvailable: boolean = false; constructor() { // Store covers in: /cover-cache/audiobooks/ @@ -29,6 +31,23 @@ export class AudiobookCacheService { ); } + /** + * Try to ensure cover cache directory exists + * Returns true if available, false if not (permissions issue) + */ + private async ensureCoverCacheDir(): Promise { + try { + await fs.mkdir(this.coverCacheDir, { recursive: true }); + this.coverCacheAvailable = true; + return true; + } catch (error: any) { + logger.warn(`[AUDIOBOOK] Cover cache directory unavailable: ${error.message}`); + logger.warn("[AUDIOBOOK] Covers will be served directly from Audiobookshelf"); + this.coverCacheAvailable = false; + return false; + } + } + /** * Sync all audiobooks from Audiobookshelf to our database */ @@ -41,15 +60,15 @@ export class AudiobookCacheService { }; try { - console.log(" Starting audiobook sync from Audiobookshelf..."); + logger.debug(" Starting audiobook sync from Audiobookshelf..."); - // Ensure cover cache directory exists - await fs.mkdir(this.coverCacheDir, { recursive: true }); + // Try to ensure cover cache directory exists (non-fatal if it fails) + await this.ensureCoverCacheDir(); // Fetch all audiobooks from Audiobookshelf const audiobooks = await audiobookshelfService.getAllAudiobooks(); - console.log( + logger.debug( `[AUDIOBOOK] Found ${audiobooks.length} audiobooks in Audiobookshelf` ); @@ -66,7 +85,7 @@ export class AudiobookCacheService { metadata.author || book.author || "Unknown Author"; - console.log(` Synced: ${title} by ${author}`); + logger.debug(` Synced: ${title} by ${author}`); } catch (error: any) { result.failed++; const metadata = book.media?.metadata || book; @@ -74,23 +93,23 @@ export class AudiobookCacheService { metadata.title || book.title || "Unknown Title"; const errorMsg = `Failed to sync ${title}: ${error.message}`; result.errors.push(errorMsg); - console.error(` ✗ ${errorMsg}`); + logger.error(` ${errorMsg}`); } } - console.log("\nSync Summary:"); - console.log(` 
Synced: ${result.synced}`); - console.log(` Failed: ${result.failed}`); - console.log(` Skipped: ${result.skipped}`); + logger.debug("\nSync Summary:"); + logger.debug(` Synced: ${result.synced}`); + logger.debug(` Failed: ${result.failed}`); + logger.debug(` Skipped: ${result.skipped}`); if (result.errors.length > 0) { - console.log("\n[ERRORS]:"); - result.errors.forEach((err) => console.log(` - ${err}`)); + logger.debug("\n[ERRORS]:"); + result.errors.forEach((err) => logger.debug(` - ${err}`)); } return result; } catch (error: any) { - console.error(" Audiobook sync failed:", error); + logger.error(" Audiobook sync failed:", error); throw error; } } @@ -106,7 +125,7 @@ export class AudiobookCacheService { // Skip if no title (invalid audiobook data) if (!title) { - console.warn(` Skipping audiobook ${book.id} - missing title`); + logger.warn(` Skipping audiobook ${book.id} - missing title`); return; } @@ -187,7 +206,7 @@ export class AudiobookCacheService { // Log series info for debugging (only for first few books) if (series) { - console.log( + logger.debug( ` [Series] "${title}" -> "${series}" #${ seriesSequence || "?" 
}` @@ -281,7 +300,7 @@ export class AudiobookCacheService { return null; } catch (error: any) { - console.error( + logger.error( "Failed to get Audiobookshelf base URL:", error.message ); @@ -291,11 +310,17 @@ export class AudiobookCacheService { /** * Download a cover image and save it locally + * Returns null if cover caching is not available (permissions issue) */ private async downloadCover( audiobookId: string, coverUrl: string - ): Promise { + ): Promise { + // Skip cover download if cache directory is not available + if (!this.coverCacheAvailable) { + return null; + } + try { // Get API key for authentication const { getSystemSettings } = await import( @@ -327,11 +352,11 @@ export class AudiobookCacheService { return filePath; } catch (error: any) { - console.error( + logger.error( `Failed to download cover for ${audiobookId}:`, error.message ); - return null as any; // Return null if download fails + return null; } } @@ -350,7 +375,7 @@ export class AudiobookCacheService { audiobook.lastSyncedAt < new Date(Date.now() - 7 * 24 * 60 * 60 * 1000) ) { - console.log( + logger.debug( `[AUDIOBOOK] Audiobook ${audiobookId} not cached or stale, syncing...` ); try { @@ -362,13 +387,13 @@ export class AudiobookCacheService { where: { id: audiobookId }, }); } catch (syncError: any) { - console.warn( + logger.warn( ` Failed to sync audiobook ${audiobookId} from Audiobookshelf:`, syncError.message ); // If we have stale cached data, return it anyway if (audiobook) { - console.log( + logger.debug( ` Using stale cached data for ${audiobookId}` ); } else { @@ -387,6 +412,13 @@ export class AudiobookCacheService { * Clean up old cached covers that are no longer in database */ async cleanupOrphanedCovers(): Promise { + // Ensure cache directory is available + const available = await this.ensureCoverCacheDir(); + if (!available) { + logger.warn("[AUDIOBOOK] Cannot cleanup covers - cache directory unavailable"); + return 0; + } + const audiobooks = await 
prisma.audiobook.findMany({ select: { localCoverPath: true }, }); @@ -398,14 +430,18 @@ export class AudiobookCacheService { ); let deleted = 0; - const files = await fs.readdir(this.coverCacheDir); + try { + const files = await fs.readdir(this.coverCacheDir); - for (const file of files) { - if (!validCoverPaths.has(file)) { - await fs.unlink(path.join(this.coverCacheDir, file)); - deleted++; - console.log(` [DELETE] Deleted orphaned cover: ${file}`); + for (const file of files) { + if (!validCoverPaths.has(file)) { + await fs.unlink(path.join(this.coverCacheDir, file)); + deleted++; + logger.debug(` [DELETE] Deleted orphaned cover: ${file}`); + } } + } catch (error: any) { + logger.warn(`[AUDIOBOOK] Failed to read cover cache directory: ${error.message}`); } return deleted; diff --git a/backend/src/services/audiobookshelf.ts b/backend/src/services/audiobookshelf.ts index 436af78..003deb7 100644 --- a/backend/src/services/audiobookshelf.ts +++ b/backend/src/services/audiobookshelf.ts @@ -1,5 +1,7 @@ import axios, { AxiosInstance } from "axios"; +import { logger } from "../utils/logger"; import { getSystemSettings } from "../utils/systemSettings"; +import { prisma } from "../utils/db"; /** * Audiobookshelf API Service @@ -33,13 +35,13 @@ class AudiobookshelfService { this.baseUrl = settings.audiobookshelfUrl.replace(/\/$/, ""); // Remove trailing slash this.apiKey = settings.audiobookshelfApiKey; this.client = axios.create({ - baseURL: this.baseUrl, + baseURL: this.baseUrl as string, headers: { Authorization: `Bearer ${this.apiKey}`, }, timeout: 30000, // 30 seconds for remote server }); - console.log("Audiobookshelf configured from database"); + logger.debug("Audiobookshelf configured from database"); this.initialized = true; return; } @@ -47,7 +49,7 @@ class AudiobookshelfService { if (error.message === "Audiobookshelf is disabled in settings") { throw error; } - console.log( + logger.debug( " Could not load Audiobookshelf from database, checking .env" ); } @@ 
-66,7 +68,7 @@ class AudiobookshelfService { }, timeout: 30000, // 30 seconds for remote server }); - console.log("Audiobookshelf configured from .env"); + logger.debug("Audiobookshelf configured from .env"); this.initialized = true; } else { throw new Error("Audiobookshelf not configured"); @@ -82,7 +84,7 @@ class AudiobookshelfService { const response = await this.client!.get("/api/libraries"); return response.status === 200; } catch (error) { - console.error("Audiobookshelf connection failed:", error); + logger.error("Audiobookshelf connection failed:", error); return false; } } @@ -122,16 +124,22 @@ class AudiobookshelfService { // DEBUG: Log the structure of the first item with series if (items.length > 0) { - const itemsWithSeries = items.filter((item: any) => - item.media?.metadata?.series || item.media?.metadata?.seriesName + const itemsWithSeries = items.filter( + (item: any) => + item.media?.metadata?.series || + item.media?.metadata?.seriesName ); if (itemsWithSeries.length > 0) { - console.log( + logger.debug( "[AUDIOBOOKSHELF DEBUG] Sample item WITH series:", - JSON.stringify(itemsWithSeries[0], null, 2).substring(0, 2000) + JSON.stringify( + itemsWithSeries[0], + null, + 2 + ).substring(0, 2000) ); } else { - console.log( + logger.debug( "[AUDIOBOOKSHELF DEBUG] No items with series found! 
Sample item:", JSON.stringify(items[0], null, 2).substring(0, 1000) ); @@ -169,7 +177,7 @@ class AudiobookshelfService { try { return await this.getLibraryItems(library.id); } catch (error) { - console.error( + logger.error( `Audiobookshelf: failed to load podcast library ${library.id}`, error ); @@ -330,6 +338,119 @@ class AudiobookshelfService { ); return response.data.book || []; } + + /** + * Sync audiobooks from Audiobookshelf to local database cache + * This populates the Audiobook table for full-text search + */ + async syncAudiobooksToCache() { + await this.ensureInitialized(); + logger.debug("[AUDIOBOOKSHELF] Starting audiobook sync to cache..."); + + try { + // Fetch all audiobooks from Audiobookshelf API + const audiobooks = await this.getAllAudiobooks(); + logger.debug( + `[AUDIOBOOKSHELF] Found ${audiobooks.length} audiobooks to sync` + ); + + // Map and upsert each audiobook to database + let syncedCount = 0; + for (const item of audiobooks) { + try { + const metadata = item.media?.metadata || {}; + + // Extract series information (check both possible formats) + let series: string | null = null; + let seriesSequence: string | null = null; + + if (metadata.series && Array.isArray(metadata.series) && metadata.series.length > 0) { + series = metadata.series[0].name || null; + seriesSequence = metadata.series[0].sequence || null; + } else if (metadata.seriesName) { + series = metadata.seriesName; + seriesSequence = metadata.seriesSequence || null; + } + + await prisma.audiobook.upsert({ + where: { id: item.id }, + update: { + title: metadata.title || "Untitled", + author: metadata.authorName || metadata.author || null, + narrator: metadata.narratorName || metadata.narrator || null, + description: metadata.description || null, + publishedYear: metadata.publishedYear + ? 
parseInt(metadata.publishedYear, 10) + : null, + publisher: metadata.publisher || null, + series, + seriesSequence, + duration: item.media?.duration || null, + numTracks: item.media?.numTracks || null, + numChapters: item.media?.numChapters || null, + size: item.media?.size + ? BigInt(item.media.size) + : null, + isbn: metadata.isbn || null, + asin: metadata.asin || null, + language: metadata.language || null, + genres: metadata.genres || [], + tags: item.media?.tags || [], + coverUrl: metadata.coverPath + ? `${this.baseUrl}${metadata.coverPath}` + : null, + audioUrl: `${this.baseUrl}/api/items/${item.id}/play`, + libraryId: item.libraryId || null, + lastSyncedAt: new Date(), + }, + create: { + id: item.id, + title: metadata.title || "Untitled", + author: metadata.authorName || metadata.author || null, + narrator: metadata.narratorName || metadata.narrator || null, + description: metadata.description || null, + publishedYear: metadata.publishedYear + ? parseInt(metadata.publishedYear, 10) + : null, + publisher: metadata.publisher || null, + series, + seriesSequence, + duration: item.media?.duration || null, + numTracks: item.media?.numTracks || null, + numChapters: item.media?.numChapters || null, + size: item.media?.size + ? BigInt(item.media.size) + : null, + isbn: metadata.isbn || null, + asin: metadata.asin || null, + language: metadata.language || null, + genres: metadata.genres || [], + tags: item.media?.tags || [], + coverUrl: metadata.coverPath + ? 
`${this.baseUrl}${metadata.coverPath}` + : null, + audioUrl: `${this.baseUrl}/api/items/${item.id}/play`, + libraryId: item.libraryId || null, + }, + }); + syncedCount++; + } catch (error) { + logger.error( + `[AUDIOBOOKSHELF] Failed to sync audiobook ${item.id}:`, + error + ); + } + } + + logger.debug( + `[AUDIOBOOKSHELF] Successfully synced ${syncedCount}/${audiobooks.length} audiobooks to cache` + ); + return { synced: syncedCount, total: audiobooks.length }; + } catch (error) { + logger.error("[AUDIOBOOKSHELF] Audiobook sync failed:", error); + throw error; + } + } } export const audiobookshelfService = new AudiobookshelfService(); diff --git a/backend/src/services/coverArt.ts b/backend/src/services/coverArt.ts index af790f7..3a1f5d8 100644 --- a/backend/src/services/coverArt.ts +++ b/backend/src/services/coverArt.ts @@ -1,4 +1,5 @@ import axios from "axios"; +import { logger } from "../utils/logger"; import { redisClient } from "../utils/redis"; import { rateLimiter } from "./rateLimiter"; @@ -13,7 +14,7 @@ class CoverArtService { if (cached === "NOT_FOUND") return null; // Cached negative result if (cached) return cached; } catch (err) { - console.warn("Redis get error:", err); + logger.warn("Redis get error:", err); } try { @@ -35,7 +36,7 @@ class CoverArtService { try { await redisClient.setEx(cacheKey, 2592000, coverUrl); // 30 days } catch (err) { - console.warn("Redis set error:", err); + logger.warn("Redis set error:", err); } return coverUrl; @@ -57,7 +58,7 @@ class CoverArtService { } return null; } - console.error(`Cover art error for ${rgMbid}:`, error.message); + logger.error(`Cover art error for ${rgMbid}:`, error.message); } return null; diff --git a/backend/src/services/coverArtExtractor.ts b/backend/src/services/coverArtExtractor.ts index e460739..9cd6bb7 100644 --- a/backend/src/services/coverArtExtractor.ts +++ b/backend/src/services/coverArtExtractor.ts @@ -1,4 +1,5 @@ import * as fs from "fs"; +import { logger } from "../utils/logger"; 
import * as path from "path"; import * as crypto from "crypto"; import { parseFile } from "music-metadata"; @@ -44,13 +45,13 @@ export class CoverArtExtractor { // Save to cache await fs.promises.writeFile(cachePath, picture.data); - console.log( + logger.debug( `[COVER-ART] Extracted cover art from ${path.basename(audioFilePath)}: ${cacheFileName}` ); return cacheFileName; } catch (err) { - console.error( + logger.error( `[COVER-ART] Failed to extract from ${audioFilePath}:`, err ); diff --git a/backend/src/services/dataCache.ts b/backend/src/services/dataCache.ts index 39bfce1..b5259cc 100644 --- a/backend/src/services/dataCache.ts +++ b/backend/src/services/dataCache.ts @@ -10,6 +10,7 @@ * - All fetched data is persisted for future use */ +import { logger } from "../utils/logger"; import { prisma } from "../utils/db"; import { redisClient } from "../utils/redis"; import { fanartService } from "./fanart"; @@ -38,15 +39,16 @@ class DataCacheService { try { const artist = await prisma.artist.findUnique({ where: { id: artistId }, - select: { heroUrl: true }, + select: { heroUrl: true, userHeroUrl: true }, }); - if (artist?.heroUrl) { + const displayHeroUrl = artist?.userHeroUrl ?? artist?.heroUrl; + if (displayHeroUrl) { // Also populate Redis for faster future reads - this.setRedisCache(cacheKey, artist.heroUrl, ARTIST_IMAGE_TTL); - return artist.heroUrl; + this.setRedisCache(cacheKey, displayHeroUrl, ARTIST_IMAGE_TTL); + return displayHeroUrl; } } catch (err) { - console.warn("[DataCache] DB lookup failed for artist:", artistId); + logger.warn("[DataCache] DB lookup failed for artist:", artistId); } // 2. Check Redis cache @@ -98,7 +100,7 @@ class DataCacheService { return album.coverUrl; } } catch (err) { - console.warn("[DataCache] DB lookup failed for album:", albumId); + logger.warn("[DataCache] DB lookup failed for album:", albumId); } // 2. 
Check Redis cache @@ -155,14 +157,15 @@ class DataCacheService { * Only returns what's already cached, doesn't make API calls */ async getArtistImagesBatch( - artists: Array<{ id: string; heroUrl?: string | null }> + artists: Array<{ id: string; heroUrl?: string | null; userHeroUrl?: string | null }> ): Promise<Map<string, string>> { const results = new Map(); - // First, use any heroUrls already in the data + // First, use any heroUrls/userHeroUrls already in the data (with override pattern) for (const artist of artists) { - if (artist.heroUrl) { - results.set(artist.id, artist.heroUrl); + const displayHeroUrl = artist.userHeroUrl ?? artist.heroUrl; + if (displayHeroUrl) { + results.set(artist.id, displayHeroUrl); } } @@ -242,7 +245,7 @@ class DataCacheService { try { heroUrl = await fanartService.getArtistImage(mbid); if (heroUrl) { - console.log(`[DataCache] Got image from Fanart.tv for ${artistName}`); + logger.debug(`[DataCache] Got image from Fanart.tv for ${artistName}`); return heroUrl; } } catch (err) { @@ -254,7 +257,7 @@ class DataCacheService { try { heroUrl = await deezerService.getArtistImage(artistName); if (heroUrl) { - console.log(`[DataCache] Got image from Deezer for ${artistName}`); + logger.debug(`[DataCache] Got image from Deezer for ${artistName}`); return heroUrl; } } catch (err) { @@ -275,7 +278,7 @@ class DataCacheService { // Filter out Last.fm placeholder images const imageUrl = largestImage["#text"]; if (!imageUrl.includes("2a96cbd8b46e442fc41c2b86b821562f")) { - console.log(`[DataCache] Got image from Last.fm for ${artistName}`); + logger.debug(`[DataCache] Got image from Last.fm for ${artistName}`); return imageUrl; } } @@ -284,7 +287,7 @@ class DataCacheService { // Last.fm failed } - console.log(`[DataCache] No image found for ${artistName}`); + logger.debug(`[DataCache] No image found for ${artistName}`); return null; } @@ -298,7 +301,7 @@ class DataCacheService { data: { heroUrl }, }); } catch (err) { - console.warn("[DataCache] Failed to update
artist heroUrl:", err); + logger.warn("[DataCache] Failed to update artist heroUrl:", err); } } @@ -312,7 +315,7 @@ class DataCacheService { data: { coverUrl }, }); } catch (err) { - console.warn("[DataCache] Failed to update album coverUrl:", err); + logger.warn("[DataCache] Failed to update album coverUrl:", err); } } @@ -327,12 +330,32 @@ class DataCacheService { } } + /** + * Set multiple Redis cache entries using pipelining + * Uses MULTI/EXEC for atomic batch writes + */ + private async setRedisCacheBatch( + entries: Array<{ key: string; value: string; ttl: number }> + ): Promise<void> { + if (entries.length === 0) return; + + try { + const multi = redisClient.multi(); + for (const { key, value, ttl } of entries) { + multi.setEx(key, ttl, value); + } + await multi.exec(); + } catch (err) { + logger.warn("[DataCache] Batch cache write failed:", err); + } + } + /** * Warm up Redis cache from database * Called on server startup */ async warmupCache(): Promise<void> { - console.log("[DataCache] Warming up Redis cache from database..."); + logger.debug("[DataCache] Warming up Redis cache from database..."); try { // Warm up artist images @@ -341,14 +364,16 @@ class DataCacheService { select: { id: true, heroUrl: true }, }); - let artistCount = 0; - for (const artist of artists) { - if (artist.heroUrl) { - await this.setRedisCache(`hero:${artist.id}`, artist.heroUrl, ARTIST_IMAGE_TTL); - artistCount++; - } - } - console.log(`[DataCache] Cached ${artistCount} artist images`); + const artistEntries = artists + .filter((a) => a.heroUrl) + .map((a) => ({ + key: `hero:${a.id}`, + value: a.heroUrl!, + ttl: ARTIST_IMAGE_TTL, + })); + + await this.setRedisCacheBatch(artistEntries); + logger.debug(`[DataCache] Cached ${artistEntries.length} artist images`); // Warm up album covers const albums = await prisma.album.findMany({ @@ -356,18 +381,20 @@ class DataCacheService { select: { id: true, coverUrl: true }, }); - let albumCount = 0; - for (const album of albums) { - if
(album.coverUrl) { - await this.setRedisCache(`album-cover:${album.id}`, album.coverUrl, ALBUM_COVER_TTL); - albumCount++; - } - } - console.log(`[DataCache] Cached ${albumCount} album covers`); + const albumEntries = albums + .filter((a) => a.coverUrl) + .map((a) => ({ + key: `album-cover:${a.id}`, + value: a.coverUrl!, + ttl: ALBUM_COVER_TTL, + })); - console.log("[DataCache] Cache warmup complete"); + await this.setRedisCacheBatch(albumEntries); + logger.debug(`[DataCache] Cached ${albumEntries.length} album covers`); + + logger.debug("[DataCache] Cache warmup complete"); } catch (err) { - console.error("[DataCache] Cache warmup failed:", err); + logger.error("[DataCache] Cache warmup failed:", err); } } } diff --git a/backend/src/services/deezer.ts b/backend/src/services/deezer.ts index 6d03229..acd42be 100644 --- a/backend/src/services/deezer.ts +++ b/backend/src/services/deezer.ts @@ -1,4 +1,5 @@ import axios from "axios"; +import { logger } from "../utils/logger"; import { redisClient } from "../utils/redis"; /** @@ -91,7 +92,7 @@ class DeezerService { */ private async setCache(key: string, value: string): Promise<void> { try { - await redisClient.setex(`${this.cachePrefix}${key}`, this.cacheTTL, value); + await redisClient.setEx(`${this.cachePrefix}${key}`, this.cacheTTL, value); } catch { // Ignore cache errors } @@ -121,7 +122,7 @@ class DeezerService { await this.setCache(cacheKey, imageUrl || "null"); return imageUrl; } catch (error: any) { - console.error(`Deezer artist image error for ${artistName}:`, error.message); + logger.error(`Deezer artist image error for ${artistName}:`, error.message); return null; } } @@ -157,7 +158,7 @@ class DeezerService { await this.setCache(cacheKey, coverUrl || "null"); return coverUrl; } catch (error: any) { - console.error(`Deezer album cover error for ${artistName} - ${albumName}:`, error.message); + logger.error(`Deezer album cover error for ${artistName} - ${albumName}:`, error.message); return null; } } @@ -182,7
+183,7 @@ class DeezerService { await this.setCache(cacheKey, previewUrl || "null"); return previewUrl; } catch (error: any) { - console.error(`Deezer track preview error for ${artistName} - ${trackName}:`, error.message); + logger.error(`Deezer track preview error for ${artistName} - ${trackName}:`, error.message); return null; } } @@ -218,7 +219,7 @@ class DeezerService { */ async getPlaylist(playlistId: string): Promise { try { - console.log(`Deezer: Fetching playlist ${playlistId}...`); + logger.debug(`Deezer: Fetching playlist ${playlistId}...`); const response = await axios.get(`${DEEZER_API}/playlist/${playlistId}`, { timeout: 15000, @@ -226,7 +227,7 @@ class DeezerService { const data = response.data; if (data.error) { - console.error("Deezer API error:", data.error); + logger.error("Deezer API error:", data.error); return null; } @@ -242,7 +243,7 @@ class DeezerService { coverUrl: track.album?.cover_medium || track.album?.cover || null, })); - console.log(`Deezer: Fetched playlist "${data.title}" with ${tracks.length} tracks`); + logger.debug(`Deezer: Fetched playlist "${data.title}" with ${tracks.length} tracks`); return { id: String(data.id), @@ -255,7 +256,7 @@ class DeezerService { isPublic: data.public ?? 
true, }; } catch (error: any) { - console.error("Deezer playlist fetch error:", error.message); + logger.error("Deezer playlist fetch error:", error.message); return null; } } @@ -280,7 +281,7 @@ class DeezerService { fans: playlist.fans || 0, })); } catch (error: any) { - console.error("Deezer chart playlists error:", error.message); + logger.error("Deezer chart playlists error:", error.message); return []; } } @@ -305,7 +306,7 @@ class DeezerService { fans: 0, })); } catch (error: any) { - console.error("Deezer playlist search error:", error.message); + logger.error("Deezer playlist search error:", error.message); return []; } } @@ -319,7 +320,7 @@ class DeezerService { const cacheKey = `playlists:featured:${limit}`; const cached = await this.getCached(cacheKey); if (cached) { - console.log("Deezer: Returning cached featured playlists"); + logger.debug("Deezer: Returning cached featured playlists"); return JSON.parse(cached); } @@ -328,7 +329,7 @@ class DeezerService { const seenIds = new Set(); // 1. Get chart playlists (max 99 available) - console.log("Deezer: Fetching chart playlists from API..."); + logger.debug("Deezer: Fetching chart playlists from API..."); const chartPlaylists = await this.getChartPlaylists(Math.min(limit, 99)); for (const p of chartPlaylists) { if (!seenIds.has(p.id)) { @@ -336,7 +337,7 @@ class DeezerService { allPlaylists.push(p); } } - console.log(`Deezer: Got ${chartPlaylists.length} chart playlists`); + logger.debug(`Deezer: Got ${chartPlaylists.length} chart playlists`); // 2. 
If we need more, search for popular genre playlists if (allPlaylists.length < limit) { @@ -360,11 +361,11 @@ class DeezerService { } const result = allPlaylists.slice(0, limit); - console.log(`Deezer: Caching ${result.length} featured playlists`); + logger.debug(`Deezer: Caching ${result.length} featured playlists`); await this.setCache(cacheKey, JSON.stringify(result)); return result; } catch (error: any) { - console.error("Deezer featured playlists error:", error.message); + logger.error("Deezer featured playlists error:", error.message); return []; } } @@ -380,12 +381,12 @@ class DeezerService { const cacheKey = "genres:all"; const cached = await this.getCached(cacheKey); if (cached) { - console.log("Deezer: Returning cached genres"); + logger.debug("Deezer: Returning cached genres"); return JSON.parse(cached); } try { - console.log("Deezer: Fetching genres from API..."); + logger.debug("Deezer: Fetching genres from API..."); const response = await axios.get(`${DEEZER_API}/genre`, { timeout: 10000, }); @@ -398,11 +399,11 @@ class DeezerService { imageUrl: genre.picture_medium || genre.picture || null, })); - console.log(`Deezer: Caching ${genres.length} genres`); + logger.debug(`Deezer: Caching ${genres.length} genres`); await this.setCache(cacheKey, JSON.stringify(genres)); return genres; } catch (error: any) { - console.error("Deezer genres error:", error.message); + logger.error("Deezer genres error:", error.message); return []; } } @@ -426,12 +427,12 @@ class DeezerService { const cacheKey = "radio:stations"; const cached = await this.getCached(cacheKey); if (cached) { - console.log("Deezer: Returning cached radio stations"); + logger.debug("Deezer: Returning cached radio stations"); return JSON.parse(cached); } try { - console.log("Deezer: Fetching radio stations from API..."); + logger.debug("Deezer: Fetching radio stations from API..."); const response = await axios.get(`${DEEZER_API}/radio`, { timeout: 10000, }); @@ -444,11 +445,11 @@ class DeezerService 
{ type: "radio" as const, })); - console.log(`Deezer: Got ${stations.length} radio stations, caching...`); + logger.debug(`Deezer: Got ${stations.length} radio stations, caching...`); await this.setCache(cacheKey, JSON.stringify(stations)); return stations; } catch (error: any) { - console.error("Deezer radio stations error:", error.message); + logger.error("Deezer radio stations error:", error.message); return []; } } @@ -464,12 +465,12 @@ class DeezerService { const cacheKey = "radio:by-genre"; const cached = await this.getCached(cacheKey); if (cached) { - console.log("Deezer: Returning cached radios by genre"); + logger.debug("Deezer: Returning cached radios by genre"); return JSON.parse(cached); } try { - console.log("Deezer: Fetching radios by genre from API..."); + logger.debug("Deezer: Fetching radios by genre from API..."); const response = await axios.get(`${DEEZER_API}/radio/genres`, { timeout: 10000, }); @@ -486,11 +487,11 @@ class DeezerService { })), })); - console.log(`Deezer: Got ${genres.length} genre categories with radios, caching...`); + logger.debug(`Deezer: Got ${genres.length} genre categories with radios, caching...`); await this.setCache(cacheKey, JSON.stringify(genres)); return genres; } catch (error: any) { - console.error("Deezer radios by genre error:", error.message); + logger.error("Deezer radios by genre error:", error.message); return []; } } @@ -500,7 +501,7 @@ class DeezerService { */ async getRadioTracks(radioId: string): Promise { try { - console.log(`Deezer: Fetching radio ${radioId} tracks...`); + logger.debug(`Deezer: Fetching radio ${radioId} tracks...`); // First get radio info const infoResponse = await axios.get(`${DEEZER_API}/radio/${radioId}`, { @@ -526,7 +527,7 @@ class DeezerService { coverUrl: track.album?.cover_medium || track.album?.cover || null, })); - console.log(`Deezer: Fetched radio "${radioInfo.title}" with ${tracks.length} tracks`); + logger.debug(`Deezer: Fetched radio "${radioInfo.title}" with 
${tracks.length} tracks`); return { id: `radio-${radioId}`, @@ -539,7 +540,7 @@ class DeezerService { isPublic: true, }; } catch (error: any) { - console.error("Deezer radio tracks error:", error.message); + logger.error("Deezer radio tracks error:", error.message); return null; } } @@ -578,7 +579,7 @@ class DeezerService { return { playlists, radios }; } catch (error: any) { - console.error("Deezer editorial content error:", error.message); + logger.error("Deezer editorial content error:", error.message); return { playlists: [], radios: [] }; } } diff --git a/backend/src/services/discoverWeekly.ts b/backend/src/services/discoverWeekly.ts index ff3610e..8fbdde2 100644 --- a/backend/src/services/discoverWeekly.ts +++ b/backend/src/services/discoverWeekly.ts @@ -11,16 +11,17 @@ * - No dynamic imports */ +import { logger } from "../utils/logger"; import { Prisma } from "@prisma/client"; import { prisma } from "../utils/db"; import { lastFmService } from "./lastfm"; import { musicBrainzService } from "./musicbrainz"; -import { simpleDownloadManager } from "./simpleDownloadManager"; import { lidarrService } from "./lidarr"; import { scanQueue } from "../workers/queues"; import { startOfWeek, subWeeks } from "date-fns"; import { getSystemSettings } from "../utils/systemSettings"; import { discoveryLogger } from "./discoveryLogger"; +import { acquisitionService } from "./acquisitionService"; interface SeedArtist { name: string; @@ -55,16 +56,16 @@ interface BatchLogEntry { * Calculate tier from Last.fm similarity score * Last.fm typically returns scores in 0.5-0.9 range for similar artists * Adjusted thresholds for better distribution: - * - High Match: 70-100% (0.7-1.0) - * - Medium Match: 50-69% (0.5-0.69) - * - Explore: 30-49% (0.3-0.49) + * - High Match: 60-100% (0.6-1.0) + * - Medium Match: 45-59% (0.45-0.59) + * - Explore: 30-44% (0.3-0.44) * - Wild Card: 0-29% (0-0.29) or explicitly set */ function getTierFromSimilarity( similarity: number ): "high" | "medium" | 
"explore" | "wildcard" { - if (similarity >= 0.7) return "high"; - if (similarity >= 0.5) return "medium"; + if (similarity >= 0.6) return "high"; + if (similarity >= 0.45) return "medium"; if (similarity >= 0.3) return "explore"; return "wildcard"; } @@ -80,7 +81,7 @@ export class DiscoverWeeklyService { userId: string, settings: any ): Promise { - console.log(`\n Processing previous discovery albums...`); + logger.debug(`\n Processing previous discovery albums...`); // Find all active discovery albums for this user const discoveryAlbums = await prisma.discoveryAlbum.findMany({ @@ -91,7 +92,7 @@ export class DiscoverWeeklyService { }); if (discoveryAlbums.length === 0) { - console.log(` No previous discovery albums to process`); + logger.debug(` No previous discovery albums to process`); return; } @@ -100,8 +101,8 @@ export class DiscoverWeeklyService { (a) => a.status === "ACTIVE" ); - console.log(` Found ${likedAlbums.length} liked albums to keep`); - console.log( + logger.debug(` Found ${likedAlbums.length} liked albums to keep`); + logger.debug( ` Found ${activeAlbums.length} non-liked albums to remove` ); @@ -137,7 +138,7 @@ export class DiscoverWeeklyService { update: {}, }); - console.log( + logger.debug( ` Moved to library: ${album.artistName} - ${album.albumTitle}` ); } @@ -148,7 +149,7 @@ export class DiscoverWeeklyService { data: { status: "MOVED" }, }); } catch (error: any) { - console.error( + logger.error( ` ✗ Failed to move ${album.albumTitle}: ${error.message}` ); } @@ -176,7 +177,7 @@ export class DiscoverWeeklyService { ); } catch (lidarrError: any) { if (lidarrError.response?.status !== 404) { - console.log( + logger.debug( ` Lidarr delete failed: ${lidarrError.message}` ); } @@ -206,11 +207,11 @@ export class DiscoverWeeklyService { data: { status: "DELETED" }, }); - console.log( + logger.debug( ` Deleted: ${album.artistName} - ${album.albumTitle}` ); } catch (error: any) { - console.error( + logger.error( ` ✗ Failed to delete 
${album.albumTitle}: ${error.message}` ); } @@ -219,7 +220,7 @@ export class DiscoverWeeklyService { // Clean up unavailable albums from previous week await prisma.unavailableAlbum.deleteMany({ where: { userId } }); - console.log(` Previous discovery cleanup complete`); + logger.debug(` Previous discovery cleanup complete`); } /** @@ -252,7 +253,7 @@ export class DiscoverWeeklyService { }); } catch (error) { // Don't fail if logging fails - console.error("Failed to add batch log:", error); + logger.error("Failed to add batch log:", error); } } @@ -265,27 +266,8 @@ export class DiscoverWeeklyService { discoveryLogger.info(`Log file: ${logPath}`); try { - // Check if Lidarr is enabled and configured discoveryLogger.section("CONFIGURATION CHECK"); const settings = await getSystemSettings(); - if ( - !settings?.lidarrEnabled || - !settings?.lidarrUrl || - !settings?.lidarrApiKey - ) { - discoveryLogger.error("Lidarr must be enabled and configured"); - discoveryLogger.end(false, "Lidarr not configured"); - throw new Error( - "Lidarr must be enabled and configured to use Discovery Weekly" - ); - } - discoveryLogger.success("Lidarr configured"); - discoveryLogger.table({ - "Lidarr URL": settings.lidarrUrl, - "API Key": settings.lidarrApiKey - ? 
"***" + settings.lidarrApiKey.slice(-4) - : "not set", - }); const weekStart = startOfWeek(new Date(), { weekStartsOn: 1 }); @@ -448,13 +430,13 @@ export class DiscoverWeeklyService { }); if (existingJob) { - console.log( + logger.debug( ` Skipping job: ${album.artistName} - ${album.albumTitle} (already in queue: ${existingJob.id})` ); continue; } - console.log( + logger.debug( ` Creating job: ${album.artistName} - ${album.albumTitle} (similarity: ${similarity}, tier: ${album.tier})` ); @@ -494,49 +476,98 @@ export class DiscoverWeeklyService { const jobs = await prisma.downloadJob.findMany({ where: { discoveryBatchId: batch.id }, }); - - for (const job of jobs) { + + // Create concurrent acquisition promises + const acquisitionPromises = jobs.map(async (job) => { const metadata = job.metadata as any; - try { - const result = await simpleDownloadManager.startDownload( - job.id, - metadata.artistName, - metadata.albumTitle, - metadata.albumMbid, - userId, - true // isDiscovery - tag artist in Lidarr for cleanup - ); - - if (result.success) { - downloadsStarted++; - discoveryLogger.success( - `Started: ${metadata.artistName} - ${metadata.albumTitle}`, - 1 - ); - } else { - downloadsFailed++; - discoveryLogger.error( - `Failed: ${metadata.albumTitle} - ${result.error}`, - 1 - ); - await this.addBatchLog( - batch.id, - "error", - `Failed to start: ${metadata.albumTitle} - ${result.error}` - ); + + discoveryLogger.info( + `Acquiring: ${metadata.artistName} - ${metadata.albumTitle}`, + 1 + ); + + const result = await acquisitionService.acquireAlbum( + { + albumTitle: metadata.albumTitle, + artistName: metadata.artistName, + mbid: metadata.albumMbid, + lastfmUrl: undefined, + }, + { + userId: userId, + discoveryBatchId: batch.id, + existingJobId: job.id, } - } catch (error: any) { - downloadsFailed++; - discoveryLogger.error( - `Error: ${metadata.albumTitle}: ${error.message}`, + ); + + if (result.success) { + discoveryLogger.success( + `Acquired via ${result.source}: 
${metadata.artistName} - ${metadata.albumTitle}`, 1 ); + + const newStatus = result.source === "soulseek" ? "completed" : "processing"; + await prisma.downloadJob.update({ + where: { id: job.id }, + data: { + status: newStatus, + lidarrRef: result.correlationId || null, + completedAt: newStatus === "completed" ? new Date() : null, + }, + }); + } else { + discoveryLogger.error( + `Failed to acquire: ${metadata.albumTitle} - ${result.error}`, + 1 + ); + + await prisma.downloadJob.update({ + where: { id: job.id }, + data: { + status: "failed", + error: result.error, + completedAt: new Date(), + }, + }); + await this.addBatchLog( batch.id, "error", - `Error starting: ${metadata.albumTitle} - ${error.message}` + `Failed to acquire ${metadata.albumTitle}: ${result.error}` ); } + + return { job, result }; + }); + + // Execute all acquisitions concurrently + const results = await Promise.allSettled(acquisitionPromises); + + // Process results and update counters + results.forEach((settledResult, index) => { + if (settledResult.status === 'fulfilled') { + const { result } = settledResult.value; + if (result.success) { + downloadsStarted++; + } else { + downloadsFailed++; + } + } else { + downloadsFailed++; + const job = jobs[index]; + const metadata = job.metadata as any; + logger.error(`[Discover] Failed to acquire ${metadata.albumTitle}: ${settledResult.reason}`); + } + }); + + // Log batch completion summary + logger.info(`[Discover] Batch complete: ${downloadsStarted} succeeded, ${downloadsFailed} failed`); + + // After all download attempts, check if batch should be completed + // This handles cases where downloads fail before webhooks are triggered + if (downloadsStarted === 0 || downloadsFailed > 0) { + logger.debug(`[Discovery] Checking batch completion (started: ${downloadsStarted}, failed: ${downloadsFailed})`); + await this.checkBatchCompletion(batch.id); } discoveryLogger.section("GENERATION COMPLETE"); @@ -602,14 +633,14 @@ export class DiscoverWeeklyService { 
if (isRetryable && attempt < maxRetries) { const delay = Math.pow(2, attempt) * 500; // 1s, 2s, 4s - console.warn( + logger.warn( ` Retry ${attempt}/${maxRetries} for ${seed.name} in ${delay}ms (${error.message})` ); await new Promise((r) => setTimeout(r, delay)); continue; } - console.warn( + logger.warn( ` Failed to get similar artists for ${seed.name}: ${error.message}` ); return []; @@ -672,7 +703,7 @@ export class DiscoverWeeklyService { // Absolute timeout - fail any batch older than 2 hours regardless of state if (batchAge > ABSOLUTE_MAX_TIMEOUT) { - console.log( + logger.debug( `\n⏰ [BATCH FORCE FAIL] Batch ${batch.id} is ${Math.round( batchAge / 3600000 )}h old - force failing` @@ -711,12 +742,12 @@ export class DiscoverWeeklyService { : BATCH_TIMEOUT_NO_COMPLETIONS; if (batchAge > timeout && pendingJobs.length > 0) { - console.log( + logger.debug( `\n⏰ [BATCH TIMEOUT] Batch ${ batch.id } stuck for ${Math.round(batchAge / 60000)}min` ); - console.log( + logger.debug( ` Completed: ${completedJobs.length}, Pending: ${pendingJobs.length}` ); @@ -733,7 +764,7 @@ export class DiscoverWeeklyService { }, }); - console.log( + logger.debug( ` Marked ${pendingJobs.length} pending jobs as failed` ); @@ -750,7 +781,7 @@ export class DiscoverWeeklyService { * Check if discovery batch is complete and trigger final steps */ async checkBatchCompletion(batchId: string) { - console.log(`\n[BATCH ${batchId}] Checking completion...`); + logger.debug(`\n[BATCH ${batchId}] Checking completion...`); const batch = await prisma.discoveryBatch.findUnique({ where: { id: batchId }, @@ -758,7 +789,7 @@ export class DiscoverWeeklyService { }); if (!batch) { - console.log(`[BATCH ${batchId}] Not found - skipping`); + logger.debug(`[BATCH ${batchId}] Not found - skipping`); return; } @@ -768,7 +799,7 @@ export class DiscoverWeeklyService { batch.status === "failed" || batch.status === "scanning" ) { - console.log( + logger.debug( `[BATCH ${batchId}] Already ${batch.status} - skipping` ); 
return; @@ -788,21 +819,22 @@ export class DiscoverWeeklyService { const failed = failedJobs.length; const total = batch.jobs.length; - console.log( + logger.debug( `[BATCH ${batchId}] Status: ${completed} completed, ${failed} failed, ${pendingJobs.length} pending (total: ${total})` ); // Wait for ALL downloads to complete/fail if (pendingJobs.length > 0) { - console.log( + logger.debug( `[BATCH ${batchId}] Still waiting for ${pendingJobs.length} downloads` ); return; } - console.log( - `[BATCH ${batchId}] All jobs done! Transitioning to scan phase...` - ); + // Wait for Lidarr to finish importing files + logger.debug(`[BATCH ${batchId}] All jobs done! Waiting 60s for Lidarr to finish importing...`); + await new Promise(resolve => setTimeout(resolve, 60000)); + logger.debug(`[BATCH ${batchId}] Transitioning to scan phase...`); // All jobs finished - use transaction to update batch and create unavailable records await prisma.$transaction(async (tx) => { @@ -866,7 +898,7 @@ export class DiscoverWeeklyService { }); if (completed === 0) { - console.log(` All downloads failed`); + logger.debug(` All downloads failed`); await this.addBatchLog(batchId, "error", "All downloads failed"); // Cleanup failed artists from Lidarr @@ -875,7 +907,7 @@ export class DiscoverWeeklyService { } // All successful downloads will be included in the playlist - console.log( + logger.debug( ` ${completed} albums ready for playlist. 
Triggering scan...` ); await this.addBatchLog( @@ -891,7 +923,7 @@ export class DiscoverWeeklyService { discoveryBatchId: batchId, }); - console.log( + logger.debug( ` Scan queued - will build playlist after scan completes` ); } @@ -900,14 +932,14 @@ export class DiscoverWeeklyService { * Build final playlist after scan completes (atomic transaction) */ async buildFinalPlaylist(batchId: string) { - console.log(`\n Building final playlist for batch ${batchId}...`); + logger.debug(`\n Building final playlist for batch ${batchId}...`); const batch = await prisma.discoveryBatch.findUnique({ where: { id: batchId }, }); if (!batch) { - console.log(` Batch not found`); + logger.debug(` Batch not found`); return; } @@ -919,7 +951,7 @@ export class DiscoverWeeklyService { }, }); - console.log(` Found ${completedJobs.length} completed downloads`); + logger.debug(` Found ${completedJobs.length} completed downloads`); await this.addBatchLog( batchId, "info", @@ -938,11 +970,11 @@ export class DiscoverWeeklyService { }) .filter((c) => c.artistName && c.albumTitle); - console.log( + logger.debug( ` Searching for tracks using MBID (primary) + name fallback:` ); for (const c of searchCriteria) { - console.log( + logger.debug( ` - "${c.albumTitle}" by "${c.artistName}" (MBID: ${ c.albumMbid || "none" })` @@ -965,7 +997,7 @@ export class DiscoverWeeklyService { }, }); if (tracks.length > 0) { - console.log( + logger.debug( ` [MBID] Found ${tracks.length} tracks for "${criteria.albumTitle}"` ); } @@ -993,7 +1025,7 @@ export class DiscoverWeeklyService { }, }); if (tracks.length > 0) { - console.log( + logger.debug( ` [NAME] Found ${tracks.length} tracks for "${criteria.albumTitle}"` ); } @@ -1039,7 +1071,7 @@ export class DiscoverWeeklyService { album: { ...album, artist: album.artist }, })); if (tracks.length > 0) { - console.log( + logger.debug( ` [NORMALIZED] Found ${tracks.length} tracks for "${criteria.albumTitle}"` ); break; @@ -1049,7 +1081,7 @@ export class 
DiscoverWeeklyService { } if (tracks.length === 0) { - console.log( + logger.debug( ` [MISS] No tracks found for "${criteria.albumTitle}" by "${criteria.artistName}"` ); } @@ -1063,10 +1095,10 @@ export class DiscoverWeeklyService { ); allTracks = uniqueTracks; - console.log(` Found ${allTracks.length} tracks from imported albums`); + logger.debug(` Found ${allTracks.length} tracks from imported albums`); if (allTracks.length === 0) { - console.log( + logger.debug( ` No tracks found after scan - albums may not have imported yet` ); await prisma.discoveryBatch.update({ @@ -1112,10 +1144,10 @@ export class DiscoverWeeklyService { const availableAlbums = onePerAlbum.length; const anchorCount = Math.ceil(availableAlbums * 0.2); // Add 20% anchors on top - console.log( + logger.debug( ` Unique albums available: ${availableAlbums} (from ${allTracks.length} total tracks)` ); - console.log( + logger.debug( ` Target composition: ${availableAlbums} discovery + ${anchorCount} anchors = ${ availableAlbums + anchorCount } total` @@ -1126,7 +1158,7 @@ export class DiscoverWeeklyService { // Step 1: Get ALL discovery tracks (1 per album) - no limit! 
let discoverySelected = [...shuffled]; - console.log( + logger.debug( ` Discovery tracks: ${discoverySelected.length} (ALL available, 1 per album)` ); @@ -1170,7 +1202,7 @@ export class DiscoverWeeklyService { take: anchorCount * 10, // Get extra for 1-per-album selection }); - console.log( + logger.debug( ` Found ${libraryTracks.length} candidate library tracks from ${seedArtistNames.length} seed artists` ); @@ -1206,7 +1238,7 @@ export class DiscoverWeeklyService { // GUARANTEE: If we don't have enough anchors from seed artists, use ANY popular library tracks if (libraryAnchors.length < anchorCount) { const needed = anchorCount - libraryAnchors.length; - console.log( + logger.debug( ` Only ${libraryAnchors.length}/${anchorCount} anchors from seeds, adding ${needed} from popular library tracks` ); @@ -1262,17 +1294,17 @@ export class DiscoverWeeklyService { } libraryAnchors = [...libraryAnchors, ...additionalAnchors]; - console.log( + logger.debug( ` Added ${additionalAnchors.length} popular library tracks as anchors (1 per album)` ); } else { - console.log( + logger.debug( ` No additional library tracks available for anchors` ); } } - console.log( + logger.debug( ` Library anchors: ${libraryAnchors.length}/${anchorCount}` ); @@ -1291,14 +1323,14 @@ export class DiscoverWeeklyService { // Log final result const target = batch.targetSongCount; // For logging purposes only if (selected.length === 0) { - console.log(` FAILED: No tracks available for playlist`); + logger.debug(` FAILED: No tracks available for playlist`); await this.addBatchLog( batchId, "error", `No tracks available for playlist` ); } else if (selected.length < target) { - console.log( + logger.debug( ` NOTE: Got ${selected.length} tracks (target was ${target}, including ALL successful downloads)` ); await this.addBatchLog( @@ -1307,7 +1339,7 @@ export class DiscoverWeeklyService { `Got ${selected.length} tracks (target was ${target})` ); } else { - console.log( + logger.debug( ` SUCCESS: Got 
${selected.length} tracks (${discoverySelected.length} discovery + ${libraryAnchors.length} anchors)` ); } @@ -1358,10 +1390,10 @@ export class DiscoverWeeklyService { // Debug: Log if job wasn't matched if (!job) { - console.log( + logger.debug( ` [WARN] No job match for: ${track.album.artist.name} - ${track.album.title}` ); - console.log( + logger.debug( ` Available jobs: ${completedJobs .map( (j) => @@ -1375,7 +1407,7 @@ export class DiscoverWeeklyService { .join(", ")}...` ); } else { - console.log( + logger.debug( ` ✓ Job matched: ${ track.album.artist.name } - ${ @@ -1484,8 +1516,8 @@ export class DiscoverWeeklyService { return { albumCount: createdAlbums.size, trackCount }; }); } catch (txError: any) { - console.error(` ERROR: Transaction failed:`, txError.message); - console.error(` Stack:`, txError.stack); + logger.error(` ERROR: Transaction failed:`, txError.message); + logger.error(` Stack:`, txError.stack); await this.addBatchLog( batchId, "error", @@ -1494,7 +1526,7 @@ export class DiscoverWeeklyService { } if (result) { - console.log( + logger.debug( ` Playlist complete: ${result.trackCount} tracks from ${result.albumCount} albums` ); await this.addBatchLog( @@ -1503,7 +1535,7 @@ export class DiscoverWeeklyService { `Playlist complete: ${result.trackCount} tracks from ${result.albumCount} albums` ); } else { - console.error( + logger.error( ` ERROR: Transaction returned null - no records created` ); await this.addBatchLog( @@ -1521,12 +1553,240 @@ export class DiscoverWeeklyService { await this.cleanupOrphanedLidarrQueue(batchId); } + /** + * Reconcile Discovery Weekly tracks after library scans + * Backfills Discovery Weekly playlists with tracks from albums that downloaded after initial playlist creation + * + * Similar to Spotify Import's reconcilePendingTracks(), but for Discovery Weekly: + * - Finds completed batches from last 7 days + * - Checks if their downloaded albums are in the library + * - Creates DiscoveryAlbum + DiscoveryTrack records 
for missing albums + */ + async reconcileDiscoveryTracks(): Promise<{ + batchesChecked: number; + tracksAdded: number; + }> { + logger.debug( + `\n[Discovery Weekly] Reconciling tracks across completed batches...` + ); + + const sevenDaysAgo = new Date(); + sevenDaysAgo.setDate(sevenDaysAgo.getDate() - 7); + + // Find completed batches from last 7 days + const completedBatches = await prisma.discoveryBatch.findMany({ + where: { + status: "completed", + completedAt: { gte: sevenDaysAgo }, + }, + orderBy: { completedAt: "desc" }, + }); + + if (completedBatches.length === 0) { + logger.debug(` No completed batches in last 7 days to reconcile`); + return { batchesChecked: 0, tracksAdded: 0 }; + } + + logger.debug( + ` Found ${completedBatches.length} completed batch(es) from last 7 days` + ); + + let totalTracksAdded = 0; + let batchesChecked = 0; + + for (const batch of completedBatches) { + logger.debug(` Checking batch ${batch.id}...`); + batchesChecked++; + + // Get completed download jobs for this batch + const completedJobs = await prisma.downloadJob.findMany({ + where: { + discoveryBatchId: batch.id, + status: "completed", + }, + }); + + if (completedJobs.length === 0) { + logger.debug(` No completed jobs in batch ${batch.id}`); + continue; + } + + logger.debug( + ` Found ${completedJobs.length} completed download job(s)` + ); + + // Check each completed job to see if it has corresponding DiscoveryAlbum records + for (const job of completedJobs) { + const metadata = job.metadata as any; + const albumMbid = metadata?.albumMbid || job.targetMbid; + const artistName = metadata?.artistName; + const albumTitle = metadata?.albumTitle; + + if (!albumMbid) { + logger.debug( + ` Skipping job ${job.id} - no album MBID` + ); + continue; + } + + // Check if this album already has DiscoveryAlbum record + const existingDiscoveryAlbum = + await prisma.discoveryAlbum.findFirst({ + where: { + userId: batch.userId, + weekStartDate: batch.weekStart, + rgMbid: albumMbid, + }, + }); 
+ + if (existingDiscoveryAlbum) { + // Already has discovery record, skip + continue; + } + + logger.debug( + ` Album "${albumTitle}" by "${artistName}" missing from Discovery - checking library...` + ); + + // PRIMARY: Search by rgMbid (most accurate) + let tracks: any[] = []; + if (albumMbid) { + tracks = await prisma.track.findMany({ + where: { + album: { rgMbid: albumMbid }, + }, + include: { + album: { include: { artist: true } }, + }, + }); + if (tracks.length > 0) { + logger.debug( + ` [MBID] Found ${tracks.length} tracks in library` + ); + } + } + + // FALLBACK: Search by artist name + album title (case-insensitive) + if (tracks.length === 0 && artistName && albumTitle) { + logger.debug( + ` [NAME] Trying name-based search: "${artistName}" - "${albumTitle}"` + ); + tracks = await prisma.track.findMany({ + where: { + album: { + title: { + equals: albumTitle, + mode: "insensitive", + }, + artist: { + name: { + equals: artistName, + mode: "insensitive", + }, + }, + }, + }, + include: { + album: { include: { artist: true } }, + }, + }); + if (tracks.length > 0) { + logger.debug( + ` [NAME] Found ${tracks.length} tracks in library` + ); + } + } + + if (tracks.length === 0) { + logger.debug( + ` No tracks found in library - album may not have imported yet` + ); + continue; + } + + // Album is in library! 
Create DiscoveryAlbum + DiscoveryTrack records + const album = tracks[0].album; + const similarity = metadata?.similarity || 0.5; + const tier = + metadata?.tier || getTierFromSimilarity(similarity); + + logger.debug( + ` ✓ Creating Discovery records for ${tracks.length} track(s)...` + ); + + try { + await prisma.$transaction(async (tx) => { + // Create DiscoveryAlbum + const discoveryAlbum = await tx.discoveryAlbum.create({ + data: { + userId: batch.userId, + rgMbid: album.rgMbid, + artistName: album.artist.name, + artistMbid: album.artist.mbid, + albumTitle: album.title, + lidarrAlbumId: job.lidarrAlbumId, + similarity, + tier, + weekStartDate: batch.weekStart, + downloadedAt: new Date(), + status: "ACTIVE", + }, + }); + + // Create DiscoveryTrack for each track + for (const track of tracks) { + // Check if track already exists (prevent duplicates) + const existingTrack = + await tx.discoveryTrack.findFirst({ + where: { + discoveryAlbumId: discoveryAlbum.id, + trackId: track.id, + }, + }); + + if (!existingTrack) { + await tx.discoveryTrack.create({ + data: { + discoveryAlbumId: discoveryAlbum.id, + trackId: track.id, + fileName: + track.filePath.split("/").pop() || + "", + filePath: track.filePath, + }, + }); + totalTracksAdded++; + } + } + }); + + logger.debug( + ` ✓ Added ${tracks.length} track(s) to Discovery Weekly` + ); + } catch (error: any) { + logger.error( + ` ✗ Failed to create Discovery records: ${error.message}` + ); + } + } + } + + logger.debug( + ` Reconciliation complete: ${totalTracksAdded} tracks added across ${batchesChecked} batches` + ); + + return { + batchesChecked, + tracksAdded: totalTracksAdded, + }; + } + /** * Cleanup orphaned Lidarr queue items that belong to this discovery batch * but are no longer needed (download completed but album not in final playlist) */ private async cleanupOrphanedLidarrQueue(batchId: string): Promise { - console.log(`\n[CLEANUP] Checking for orphaned Lidarr queue items...`); + logger.debug(`\n[CLEANUP] 
Checking for orphaned Lidarr queue items...`); try { const batch = await prisma.discoveryBatch.findUnique({ @@ -1554,7 +1814,7 @@ export class DiscoverWeeklyService { } if (ourDownloadIds.size === 0) { - console.log(` No download IDs to check`); + logger.debug(` No download IDs to check`); return; } @@ -1599,7 +1859,7 @@ export class DiscoverWeeklyService { timeout: 10000, } ); - console.log( + logger.debug( ` Removed orphaned queue item: ${item.title}` ); removed++; @@ -1611,12 +1871,12 @@ export class DiscoverWeeklyService { } if (removed > 0) { - console.log(` Cleaned up ${removed} orphaned queue item(s)`); + logger.debug(` Cleaned up ${removed} orphaned queue item(s)`); } else { - console.log(` No orphaned queue items found`); + logger.debug(` No orphaned queue items found`); } } catch (error: any) { - console.error( + logger.error( `[CLEANUP] Error cleaning orphaned queue:`, error.message ); @@ -1628,12 +1888,12 @@ export class DiscoverWeeklyService { * Only removes artists that: * - Had ALL their downloads fail in this batch * - Don't have any other music in the user's library - * + * * NOTE: With tag-based tracking, we simply remove artists with the discovery tag * who don't have successful downloads. The tag is the source of truth. 
*/ private async cleanupFailedArtists(batchId: string): Promise { - console.log( + logger.debug( `\n[CLEANUP] Tag-based cleanup for failed discovery artists...` ); @@ -1655,11 +1915,15 @@ export class DiscoverWeeklyService { } } - console.log(` ${successfulArtistMbids.size} artists had successful downloads`); + logger.debug( + ` ${successfulArtistMbids.size} artists had successful downloads` + ); // Get all artists with the discovery tag const discoveryArtists = await lidarrService.getDiscoveryArtists(); - console.log(` ${discoveryArtists.length} artists in Lidarr have discovery tag`); + logger.debug( + ` ${discoveryArtists.length} artists in Lidarr have discovery tag` + ); let removed = 0; let kept = 0; @@ -1685,7 +1949,9 @@ export class DiscoverWeeklyService { }); if (hasKept) { - console.log(` Keeping ${artistName} - has liked albums (removing tag)`); + logger.debug( + ` Keeping ${artistName} - has liked albums (removing tag)` + ); await lidarrService.removeDiscoveryTagByMbid(artistMbid); kept++; continue; @@ -1701,26 +1967,31 @@ export class DiscoverWeeklyService { }); if (hasActiveOther) { - console.log(` Keeping ${artistName} - has active albums from other batches`); + logger.debug( + ` Keeping ${artistName} - has active albums from other batches` + ); kept++; continue; } // Artist has discovery tag, no successful downloads, no liked albums = remove try { - const result = await lidarrService.deleteArtistById(lidarrArtist.id, true); + const result = await lidarrService.deleteArtistById( + lidarrArtist.id, + true + ); if (result.success) { - console.log(` ✓ Removed: ${artistName}`); + logger.debug(` Removed: ${artistName}`); removed++; } } catch (error: any) { - console.error(` ✗ Failed to remove ${artistName}: ${error.message}`); + logger.error( + ` Failed to remove ${artistName}: ${error.message}` + ); } } - console.log( - ` Cleanup complete: ${removed} removed, ${kept} kept` - ); + logger.debug(` Cleanup complete: ${removed} removed, ${kept} kept`); await 
this.addBatchLog( batchId, "info", @@ -1736,7 +2007,7 @@ export class DiscoverWeeklyService { extraJobs: any[], userId: string ): Promise { - console.log( + logger.debug( `\n[CLEANUP] Removing ${extraJobs.length} extra albums from Lidarr and filesystem...` ); @@ -1763,7 +2034,7 @@ export class DiscoverWeeklyService { true ); if (result.success) { - console.log( + logger.debug( ` ✓ Removed: ${artistName} - ${albumTitle}` ); albumsRemoved++; @@ -1773,12 +2044,12 @@ export class DiscoverWeeklyService { artistsToCheck.add(artistMbid); } } else { - console.log( + logger.debug( ` - Skip: ${artistName} - ${albumTitle} (${result.message})` ); } } else { - console.log( + logger.debug( ` - Skip: ${artistName} - ${albumTitle} (no Lidarr ID)` ); } @@ -1793,7 +2064,7 @@ export class DiscoverWeeklyService { }, }); } catch (error: any) { - console.error( + logger.error( ` ✗ Error: ${artistName} - ${albumTitle}: ${error.message}` ); errors++; @@ -1821,7 +2092,7 @@ export class DiscoverWeeklyService { true ); if (result.success) { - console.log(` ✓ Removed empty artist: ${artistMbid}`); + logger.debug(` Removed empty artist: ${artistMbid}`); } } } catch (error) { @@ -1829,7 +2100,7 @@ export class DiscoverWeeklyService { } } - console.log( + logger.debug( ` Extra album cleanup: ${albumsRemoved} removed, ${errors} errors` ); } @@ -1904,7 +2175,7 @@ export class DiscoverWeeklyService { include: { albums: { take: 1 } }, }); if (byMbid && byMbid.albums.length > 0) { - console.log( + logger.debug( ` [LIBRARY] ${artistName} IN LIBRARY (matched by MBID, ${byMbid.albums.length} album(s))` ); return true; @@ -1920,7 +2191,7 @@ export class DiscoverWeeklyService { }); if (byName !== null && byName.albums.length > 0) { - console.log( + logger.debug( ` [LIBRARY] ${artistName} IN LIBRARY (matched by name, ${byName.albums.length} album(s))` ); return true; @@ -1959,7 +2230,7 @@ export class DiscoverWeeklyService { }, }); if (album) { - console.log( + logger.debug( ` [OWNED-NAME] Found 
"${albumTitle}" by "${artistName}" in Album table` ); return true; @@ -1994,7 +2265,7 @@ export class DiscoverWeeklyService { (ownedNormalized.includes(normalizedAlbum) || normalizedAlbum.includes(ownedNormalized)) ) { - console.log( + logger.debug( ` [OWNED-NAME] Found "${albumTitle}" by "${artistName}" in OwnedAlbum table` ); return true; @@ -2082,7 +2353,7 @@ export class DiscoverWeeklyService { const metadata = failedJob.metadata as any; const failedArtistMbid = metadata?.artistMbid; - console.log( + logger.debug( `[Discovery] Finding replacement for: ${metadata?.artistName} - ${metadata?.albumTitle}` ); @@ -2100,13 +2371,13 @@ export class DiscoverWeeklyService { } } - console.log( + logger.debug( `[Discovery] Already have ${attemptedArtistMbids.size} artists in batch, prioritizing new artists` ); // Tier 2: Try album from DIFFERENT similar artist - search ALL seeds with more similar artists // IMPORTANT: Never pick same artist twice for diversity! - console.log( + logger.debug( `[Discovery] Tier 2: Searching ALL seeds for albums from NEW artists (diversity enforced)` ); const seeds = await this.getSeedArtists(batch.userId); @@ -2154,13 +2425,13 @@ export class DiscoverWeeklyService { similar.mbid ); if (artistInLibrary) { - console.log( + logger.debug( `[Discovery] Skipping ${similar.name} - already in library` ); continue; } } catch (e: any) { - console.error( + logger.error( `[Discovery] isArtistInLibrary error for ${similar.name}: ${e.message}` ); // Continue anyway - assume not in library if check fails @@ -2174,7 +2445,7 @@ export class DiscoverWeeklyService { ); if (owned) continue; } catch (e: any) { - console.error( + logger.error( `[Discovery] isAlbumOwned error: ${e.message}` ); continue; // Skip on error @@ -2188,13 +2459,13 @@ export class DiscoverWeeklyService { ); if (excluded) continue; } catch (e: any) { - console.error( + logger.error( `[Discovery] isAlbumExcluded error: ${e.message}` ); continue; // Skip on error } - console.log( + 
logger.debug( `[Discovery] Tier 2 replacement found: ${album.name} by ${similar.name} (NEW artist!)` ); return { @@ -2214,12 +2485,12 @@ export class DiscoverWeeklyService { // NOTE: Same-artist fallback REMOVED - we enforce strict one-album-per-artist // If we can't find a new artist, go straight to library anchor - console.log( + logger.debug( `[Discovery] No new artists found, using library anchor (diversity enforced)` ); // Tier 3: Use track from user's library as anchor (related to discovery seeds) - console.log( + logger.debug( `[Discovery] Tier 3: Selecting anchor track from user's library (seed artists)` ); try { @@ -2248,7 +2519,7 @@ export class DiscoverWeeklyService { ownedAlbum.rgMbid && !attemptedMbids.has(ownedAlbum.rgMbid) ) { - console.log( + logger.debug( `[Discovery] Tier 3 anchor found: ${ownedAlbum.artist.name} - ${ownedAlbum.title} (from library)` ); return { @@ -2262,12 +2533,12 @@ export class DiscoverWeeklyService { } } } catch (e) { - console.log( + logger.debug( `[Discovery] Tier 3 search failed: ${(e as Error).message}` ); } - console.log(`[Discovery] No replacement found`); + logger.debug(`[Discovery] No replacement found`); return null; } @@ -2288,8 +2559,8 @@ export class DiscoverWeeklyService { const seenAlbums = new Set(); const existingArtistsForFallback: any[] = []; // Artists in library saved for second pass - console.log(`\n Finding ${targetCount} recommended albums...`); - console.log(` Seeds: ${seeds.map((s) => s.name).join(", ")}`); + logger.debug(`\n Finding ${targetCount} recommended albums...`); + logger.debug(` Seeds: ${seeds.map((s) => s.name).join(", ")}`); let totalSimilarArtists = 0; let totalAlbumsChecked = 0; @@ -2308,14 +2579,14 @@ export class DiscoverWeeklyService { allSimilarArtists.push(sim); } } - console.log( + logger.debug( ` Total similar artists from all seeds: ${allSimilarArtists.length}` ); // ============================================ // PASS 1: NEW ARTISTS ONLY (true discovery) // 
============================================ - console.log(`\n === PASS 1: NEW Artists Only ===`); + logger.debug(`\n === PASS 1: NEW Artists Only ===`); for (const sim of allSimilarArtists) { if (recommendations.length >= targetCount) break; @@ -2333,7 +2604,7 @@ export class DiscoverWeeklyService { sim.mbid ); } catch (e: any) { - console.error( + logger.error( ` isArtistInLibrary ERROR for ${sim.name}: ${e.message}` ); } @@ -2359,14 +2630,14 @@ export class DiscoverWeeklyService { if (album.recommendation) { recommendations.push(album.recommendation); - console.log( + logger.debug( ` ✓ ADDED (NEW): ${sim.name} - ${album.recommendation.albumTitle}` ); } } } - console.log( + logger.debug( ` Pass 1 complete: ${recommendations.length}/${targetCount} from NEW artists` ); @@ -2377,8 +2648,8 @@ export class DiscoverWeeklyService { recommendations.length < targetCount && existingArtistsForFallback.length > 0 ) { - console.log(`\n === PASS 2: Existing Artists (fallback) ===`); - console.log( + logger.debug(`\n === PASS 2: Existing Artists (fallback) ===`); + logger.debug( ` Need ${targetCount - recommendations.length} more, have ${ existingArtistsForFallback.length } existing artists to try` @@ -2403,61 +2674,61 @@ export class DiscoverWeeklyService { if (album.recommendation) { recommendations.push(album.recommendation); addedFromExistingArtists++; - console.log( + logger.debug( ` ✓ ADDED (EXISTING): ${sim.name} - ${album.recommendation.albumTitle}` ); } } } - console.log( + logger.debug( ` Pass 2 complete: Added ${addedFromExistingArtists} from existing artists` ); } // Summary logging - console.log(`\n === Recommendation Summary ===`); - console.log(` Similar artists checked: ${totalSimilarArtists}`); - console.log( + logger.debug(`\n === Recommendation Summary ===`); + logger.debug(` Similar artists checked: ${totalSimilarArtists}`); + logger.debug( ` Artists already in library (fallback pool): ${skippedArtistInLibrary}` ); - console.log(` Albums checked: 
${totalAlbumsChecked}`); - console.log(` Skipped (no MBID from MusicBrainz): ${skippedNoMbid}`); - console.log(` Skipped (album already owned): ${skippedOwned}`); - console.log( + logger.debug(` Albums checked: ${totalAlbumsChecked}`); + logger.debug(` Skipped (no MBID from MusicBrainz): ${skippedNoMbid}`); + logger.debug(` Skipped (album already owned): ${skippedOwned}`); + logger.debug( ` Skipped (excluded - recently recommended): ${skippedExcluded}` ); - console.log(` Skipped (duplicate): ${skippedDuplicate}`); - console.log(` ✓ Found ${recommendations.length} albums total`); - console.log( + logger.debug(` Skipped (duplicate): ${skippedDuplicate}`); + logger.debug(` Found ${recommendations.length} albums total`); + logger.debug( ` - ${ recommendations.length - addedFromExistingArtists } from NEW artists` ); - console.log( + logger.debug( ` - ${addedFromExistingArtists} from EXISTING artists (fallback)` ); if (recommendations.length === 0 && totalSimilarArtists === 0) { - console.log( + logger.debug( ` [WARN] No similar artists found! Check Last.fm API configuration.` ); } else if (recommendations.length === 0 && totalAlbumsChecked === 0) { - console.log( + logger.debug( ` [WARN] No albums returned from Last.fm! Check getArtistTopAlbums.` ); } else if ( recommendations.length === 0 && skippedNoMbid === totalAlbumsChecked ) { - console.log( + logger.debug( ` [WARN] All albums failed MusicBrainz lookup! Check searchAlbum.` ); } else if ( recommendations.length === 0 && skippedOwned >= totalAlbumsChecked ) { - console.log( + logger.debug( ` [WARN] All albums already owned! 
Need more variety in similar artists.` ); } @@ -2608,7 +2879,7 @@ export class DiscoverWeeklyService { }; } } catch (error: any) { - console.warn( + logger.warn( ` Failed to get albums for ${artist.name}: ${error.message}` ); } @@ -2652,11 +2923,15 @@ export class DiscoverWeeklyService { }); // Collect genres from artists (stored as tags) + // MERGE canonical genres + user-added genres const genreCounts = new Map(); for (const play of recentPlays) { const artist = play.track?.album?.artist; - if (artist?.genres) { + if (!artist) continue; + + // Collect canonical genres + if (artist.genres) { const genres = Array.isArray(artist.genres) ? artist.genres : ((artist.genres as string) || "") @@ -2672,6 +2947,22 @@ export class DiscoverWeeklyService { } } } + + // Also collect user-added genres (metadata override system) + if (artist.userGenres) { + const userGenres = Array.isArray(artist.userGenres) + ? artist.userGenres + : []; + + for (const genre of userGenres) { + if (genre && typeof genre === "string") { + genreCounts.set( + genre.toLowerCase(), + (genreCounts.get(genre.toLowerCase()) || 0) + 1 + ); + } + } + } } // Sort by count and return top genres @@ -2680,7 +2971,7 @@ export class DiscoverWeeklyService { .slice(0, 10) .map(([genre]) => genre); } catch (error) { - console.error("Error getting user genres:", error); + logger.error("Error getting user genres:", error); return []; } } @@ -2694,7 +2985,7 @@ export class DiscoverWeeklyService { targetCount: number, seenAlbums: Set ): Promise { - console.log( + logger.debug( `\n[STRATEGY] Tag Exploration - finding studio albums by genre` ); @@ -2722,11 +3013,11 @@ export class DiscoverWeeklyService { }; if (genres.length === 0) { - console.log(` No genres found for user, using fallback tags`); + logger.debug(` No genres found for user, using fallback tags`); genres.push("rock", "indie", "alternative"); // Fallback } - console.log(` User's top genres: ${genres.slice(0, 5).join(", ")}`); + logger.debug(` User's top 
genres: ${genres.slice(0, 5).join(", ")}`); for (const genre of genres.slice(0, 5)) { if (recommendations.length >= targetCount) break; @@ -2787,18 +3078,18 @@ export class DiscoverWeeklyService { similarity: 0.7, // Tag-based discovery tier: "wildcard", }); - console.log( + logger.debug( ` ✓ TAG: ${artistName} - ${album.name} (${genre})` ); } } catch (error: any) { - console.warn( + logger.warn( ` Tag search failed for ${genre}: ${error.message}` ); } } - console.log( + logger.debug( ` Tag exploration found ${recommendations.length} albums` ); return recommendations; @@ -2824,9 +3115,9 @@ export class DiscoverWeeklyService { const seenArtists = new Set(); const recommendations: RecommendedAlbum[] = []; - console.log(`\n[DISCOVERY] Tier-Based Selection`); - console.log(` Target: ${targetCount} albums`); - console.log( + logger.debug(`\n[DISCOVERY] Tier-Based Selection`); + logger.debug(` Target: ${targetCount} albums`); + logger.debug( ` Distribution: 30% high, 40% medium, 20% explore, 10% wildcard` ); @@ -2845,7 +3136,7 @@ export class DiscoverWeeklyService { ); const exploreCount = similarArtistTarget - highCount - mediumCount; - console.log( + logger.debug( ` Targets: ${highCount} high, ${mediumCount} medium, ${exploreCount} explore, ${wildcardCount} wildcard` ); @@ -2870,13 +3161,13 @@ export class DiscoverWeeklyService { ), }; - console.log( + logger.debug( ` Available: ${byTier.high.length} high, ${byTier.medium.length} medium, ${byTier.explore.length} explore` ); // Debug: Show top artists from each tier with their match scores if (byTier.high.length > 0) { - console.log( + logger.debug( ` HIGH tier sample: ${byTier.high .slice(0, 3) .map((a) => `${a.name}(${(a.match * 100).toFixed(0)}%)`) @@ -2884,7 +3175,7 @@ export class DiscoverWeeklyService { ); } if (byTier.medium.length > 0) { - console.log( + logger.debug( ` MEDIUM tier sample: ${byTier.medium .slice(0, 3) .map((a) => `${a.name}(${(a.match * 100).toFixed(0)}%)`) @@ -2892,7 +3183,7 @@ export class 
DiscoverWeeklyService { ); } if (byTier.explore.length > 0) { - console.log( + logger.debug( ` EXPLORE tier sample: ${byTier.explore .slice(0, 3) .map((a) => `${a.name}(${(a.match * 100).toFixed(0)}%)`) @@ -2933,7 +3224,7 @@ export class DiscoverWeeklyService { } if (artistInLibrary) { - console.log(` [SKIP] ${artist.name} - in library`); + logger.debug(` [SKIP] ${artist.name} - in library`); continue; } @@ -2951,7 +3242,7 @@ export class DiscoverWeeklyService { result.recommendation.similarity = artist.match || result.recommendation.similarity; selected.push(result.recommendation); - console.log( + logger.debug( ` ✓ [${tierName.toUpperCase()}] ${artist.name} - ${ result.recommendation.albumTitle } (${((artist.match || 0) * 100).toFixed(0)}%)` @@ -2963,11 +3254,11 @@ export class DiscoverWeeklyService { }; // Select from each tier - console.log(`\n === Selecting from HIGH tier ===`); + logger.debug(`\n === Selecting from HIGH tier ===`); const highPicks = await selectFromTier(byTier.high, highCount, "high"); recommendations.push(...highPicks); - console.log(`\n === Selecting from MEDIUM tier ===`); + logger.debug(`\n === Selecting from MEDIUM tier ===`); const mediumPicks = await selectFromTier( byTier.medium, mediumCount, @@ -2975,7 +3266,7 @@ export class DiscoverWeeklyService { ); recommendations.push(...mediumPicks); - console.log(`\n === Selecting from EXPLORE tier ===`); + logger.debug(`\n === Selecting from EXPLORE tier ===`); const explorePicks = await selectFromTier( byTier.explore, exploreCount, @@ -2985,7 +3276,7 @@ export class DiscoverWeeklyService { // If we didn't get enough from tiered selection, fill with any available NEW artists if (recommendations.length < similarArtistTarget) { - console.log( + logger.debug( `\n === Filling remaining slots (NEW artists only) ===` ); const remaining = similarArtistTarget - recommendations.length; @@ -3013,7 +3304,7 @@ export class DiscoverWeeklyService { } if (artistInLibrary) { - console.log(` [SKIP] 
${artist.name} - in library`); + logger.debug(` [SKIP] ${artist.name} - in library`); continue; } @@ -3032,7 +3323,7 @@ export class DiscoverWeeklyService { result.recommendation.similarity = artist.match || result.recommendation.similarity; recommendations.push(result.recommendation); - console.log( + logger.debug( ` ✓ [FILL] ${artist.name} - ${ result.recommendation.albumTitle } (${(artist.match * 100).toFixed(0)}%)` @@ -3043,10 +3334,10 @@ export class DiscoverWeeklyService { // FALLBACK: If still not enough, allow existing artists with NEW albums if (recommendations.length < similarArtistTarget) { - console.log( + logger.debug( `\n === FALLBACK: Existing artists with NEW albums ===` ); - console.log( + logger.debug( ` Need ${ similarArtistTarget - recommendations.length } more recommendations` @@ -3078,7 +3369,7 @@ export class DiscoverWeeklyService { result.recommendation.similarity = artist.match || result.recommendation.similarity; recommendations.push(result.recommendation); - console.log( + logger.debug( ` ✓ [EXISTING] ${artist.name} - ${ result.recommendation.albumTitle } (${((artist.match || 0) * 100).toFixed(0)}%)` @@ -3088,7 +3379,7 @@ export class DiscoverWeeklyService { } // Add genre wildcards for variety - console.log( + logger.debug( `\n === Adding ${wildcardCount} WILDCARD picks from genre tags ===` ); const wildcards = await this.tagExplorationStrategy( @@ -3110,8 +3401,8 @@ export class DiscoverWeeklyService { .length, }; - console.log(`\n[DISCOVERY] Final: ${recommendations.length} albums`); - console.log( + logger.debug(`\n[DISCOVERY] Final: ${recommendations.length} albums`); + logger.debug( ` High: ${tierCounts.high}, Medium: ${tierCounts.medium}, Explore: ${tierCounts.explore}, Wildcard: ${tierCounts.wildcard}` ); diff --git a/backend/src/services/discoveryLogger.ts b/backend/src/services/discoveryLogger.ts index 2d52817..40a4fe8 100644 --- a/backend/src/services/discoveryLogger.ts +++ b/backend/src/services/discoveryLogger.ts @@ -1,4 +1,5 
@@ import * as fs from "fs"; +import { logger } from "../utils/logger"; import * as path from "path"; /** @@ -59,7 +60,7 @@ class DiscoveryLogger { } // Also write to console for real-time visibility - console.log(message); + logger.debug(message); } /** diff --git a/backend/src/services/downloadQueue.ts b/backend/src/services/downloadQueue.ts index dfdf90f..9daa1bc 100644 --- a/backend/src/services/downloadQueue.ts +++ b/backend/src/services/downloadQueue.ts @@ -1,3 +1,5 @@ +import { logger } from "../utils/logger"; + interface DownloadInfo { downloadId: string; albumTitle: string; @@ -72,15 +74,15 @@ class DownloadQueueManager { }; this.activeDownloads.set(downloadId, info); - console.log( + logger.debug( `[DOWNLOAD] Started: "${albumTitle}" by ${artistName} (${downloadId})` ); - console.log(` Album MBID: ${albumMbid}`); - console.log(` Active downloads: ${this.activeDownloads.size}`); + logger.debug(` Album MBID: ${albumMbid}`); + logger.debug(` Active downloads: ${this.activeDownloads.size}`); // Persist Lidarr download reference to download job for later status updates this.linkDownloadJob(downloadId, albumMbid).catch((error) => { - console.error(` linkDownloadJob error:`, error); + logger.error(` linkDownloadJob error:`, error); }); // Start timeout on first download @@ -108,12 +110,12 @@ class DownloadQueueManager { */ async completeDownload(downloadId: string, albumTitle: string) { this.activeDownloads.delete(downloadId); - console.log(`Download complete: "${albumTitle}" (${downloadId})`); - console.log(` Remaining downloads: ${this.activeDownloads.size}`); + logger.debug(`Download complete: "${albumTitle}" (${downloadId})`); + logger.debug(` Remaining downloads: ${this.activeDownloads.size}`); // If no more downloads, trigger refresh immediately if (this.activeDownloads.size === 0) { - console.log(`⏰ All downloads complete! Starting refresh now...`); + logger.debug(`⏰ All downloads complete! 
Starting refresh now...`); this.clearTimeout(); this.triggerFullRefresh(); } @@ -125,29 +127,29 @@ class DownloadQueueManager { async failDownload(downloadId: string, reason: string) { const info = this.activeDownloads.get(downloadId); if (!info) { - console.log( + logger.debug( ` Download ${downloadId} not tracked, ignoring failure` ); return; } - console.log(` Download failed: "${info.albumTitle}" (${downloadId})`); - console.log(` Reason: ${reason}`); - console.log(` Attempt ${info.attempts}/${this.MAX_RETRY_ATTEMPTS}`); + logger.debug(` Download failed: "${info.albumTitle}" (${downloadId})`); + logger.debug(` Reason: ${reason}`); + logger.debug(` Attempt ${info.attempts}/${this.MAX_RETRY_ATTEMPTS}`); // Check if we should retry if (info.attempts < this.MAX_RETRY_ATTEMPTS) { info.attempts++; - console.log(` Retrying download... (attempt ${info.attempts})`); + logger.debug(` Retrying download... (attempt ${info.attempts})`); await this.retryDownload(info); } else { - console.log(` ⛔ Max retry attempts reached, giving up`); + logger.debug(` ⛔ Max retry attempts reached, giving up`); await this.cleanupFailedAlbum(info); this.activeDownloads.delete(downloadId); // Check if all downloads are done if (this.activeDownloads.size === 0) { - console.log( + logger.debug( `⏰ All downloads finished (some failed). 
Starting refresh...` ); this.clearTimeout(); @@ -162,7 +164,7 @@ class DownloadQueueManager { private async retryDownload(info: DownloadInfo) { try { if (!info.albumId) { - console.log(` No album ID, cannot retry`); + logger.debug(` No album ID, cannot retry`); return; } @@ -176,7 +178,7 @@ class DownloadQueueManager { !settings.lidarrUrl || !settings.lidarrApiKey ) { - console.log(` Lidarr not configured`); + logger.debug(` Lidarr not configured`); return; } @@ -195,9 +197,9 @@ class DownloadQueueManager { } ); - console.log(` Retry search triggered in Lidarr`); + logger.debug(` Retry search triggered in Lidarr`); } catch (error: any) { - console.log(` Failed to retry: ${error.message}`); + logger.debug(` Failed to retry: ${error.message}`); } } @@ -206,7 +208,7 @@ class DownloadQueueManager { */ private async cleanupFailedAlbum(info: DownloadInfo) { try { - console.log(` Cleaning up failed album: ${info.albumTitle}`); + logger.debug(` Cleaning up failed album: ${info.albumTitle}`); const { getSystemSettings } = await import( "../utils/systemSettings" @@ -233,9 +235,9 @@ class DownloadQueueManager { timeout: 10000, } ); - console.log(` Removed album from Lidarr`); + logger.debug(` Removed album from Lidarr`); } catch (error: any) { - console.log(` Failed to remove album: ${error.message}`); + logger.debug(` Failed to remove album: ${error.message}`); } } @@ -264,27 +266,27 @@ class DownloadQueueManager { timeout: 10000, } ); - console.log( + logger.debug( ` Removed artist from Lidarr (no other albums)` ); } } catch (error: any) { - console.log( + logger.debug( ` Failed to check/remove artist: ${error.message}` ); } } - // Mark as failed in Discovery database + // Mark as deleted in Discovery database (closest to failed status) const { prisma } = await import("../utils/db"); await prisma.discoveryAlbum.updateMany({ where: { albumTitle: info.albumTitle }, - data: { status: "FAILED" }, + data: { status: "DELETED" }, }); - console.log(` Marked as failed in database`); 
+ logger.debug(` Marked as failed in database`); // Notify callbacks about unavailable album - console.log( + logger.debug( ` [NOTIFY] Notifying ${this.unavailableCallbacks.length} callbacks about unavailable album` ); for (const callback of this.unavailableCallbacks) { @@ -299,11 +301,11 @@ class DownloadQueueManager { similarity: info.similarity, }); } catch (error: any) { - console.log(` Callback error: ${error.message}`); + logger.debug(` Callback error: ${error.message}`); } } } catch (error: any) { - console.log(` Cleanup error: ${error.message}`); + logger.debug(` Cleanup error: ${error.message}`); } } @@ -312,20 +314,20 @@ class DownloadQueueManager { */ private startTimeout() { const timeoutMs = this.TIMEOUT_MINUTES * 60 * 1000; - console.log( + logger.debug( `[TIMER] Starting ${this.TIMEOUT_MINUTES}-minute timeout for automatic scan` ); this.timeoutTimer = setTimeout(() => { if (this.activeDownloads.size > 0) { - console.log( + logger.debug( `\n Timeout reached! ${this.activeDownloads.size} downloads still pending.` ); - console.log(` These downloads never completed:`); + logger.debug(` These downloads never completed:`); // Mark each pending download as failed to trigger callbacks for (const [downloadId, info] of this.activeDownloads) { - console.log( + logger.debug( ` - ${info.albumTitle} by ${info.artistName}` ); // This will trigger the unavailable album callback @@ -333,14 +335,14 @@ class DownloadQueueManager { downloadId, "Download timeout - never completed" ).catch((err) => { - console.error( + logger.error( `Error failing download ${downloadId}:`, err ); }); } - console.log( + logger.debug( ` Triggering scan anyway to process completed downloads...\n` ); } else { @@ -364,27 +366,27 @@ class DownloadQueueManager { */ private async triggerFullRefresh() { try { - console.log("\n Starting full library refresh...\n"); + logger.debug("\n Starting full library refresh...\n"); // Step 1: Clear failed imports from Lidarr - console.log("[1/2] Checking for 
failed imports in Lidarr..."); + logger.debug("[1/2] Checking for failed imports in Lidarr..."); await this.clearFailedLidarrImports(); // Step 2: Trigger Lidify library sync - console.log("[2/2] Triggering Lidify library sync..."); + logger.debug("[2/2] Triggering Lidify library sync..."); const lidifySuccess = await this.triggerLidifySync(); if (!lidifySuccess) { - console.error(" Lidify sync failed"); + logger.error(" Lidify sync failed"); return; } - console.log("Lidify sync started"); - console.log( + logger.debug("Lidify sync started"); + logger.debug( "\n[SUCCESS] Full library refresh complete! New music should appear shortly.\n" ); } catch (error) { - console.error(" Library refresh error:", error); + logger.error(" Library refresh error:", error); } } @@ -399,7 +401,7 @@ class DownloadQueueManager { const settings = await getSystemSettings(); if (!settings.lidarrEnabled || !settings.lidarrUrl) { - console.log(" Lidarr not configured, skipping"); + logger.debug(" Lidarr not configured, skipping"); return; } @@ -408,7 +410,7 @@ class DownloadQueueManager { // Get Lidarr API key const apiKey = settings.lidarrApiKey; if (!apiKey) { - console.log(" Lidarr API key not found, skipping"); + logger.debug(" Lidarr API key not found, skipping"); return; } @@ -433,11 +435,11 @@ class DownloadQueueManager { ); if (failed.length === 0) { - console.log(" No failed imports found"); + logger.debug(" No failed imports found"); return; } - console.log(` Found ${failed.length} failed import(s)`); + logger.debug(` Found ${failed.length} failed import(s)`); for (const item of failed) { const artistName = @@ -445,7 +447,7 @@ class DownloadQueueManager { const albumTitle = item.album?.title || item.album?.name || "Unknown Album"; - console.log(` ${artistName} - ${albumTitle}`); + logger.debug(` ${artistName} - ${albumTitle}`); try { // Remove from queue, blocklist, and trigger search @@ -474,22 +476,22 @@ class DownloadQueueManager { timeout: 10000, } ); - console.log( + 
logger.debug( ` → Blocklisted and searching for alternative` ); } else { - console.log( + logger.debug( ` → Blocklisted (no album ID for re-search)` ); } } catch (error: any) { - console.log(` Failed to process: ${error.message}`); + logger.debug(` Failed to process: ${error.message}`); } } - console.log(` Cleared ${failed.length} failed import(s)`); + logger.debug(` Cleared ${failed.length} failed import(s)`); } catch (error: any) { - console.log(` Failed to check Lidarr queue: ${error.message}`); + logger.debug(` Failed to check Lidarr queue: ${error.message}`); } } @@ -501,12 +503,12 @@ class DownloadQueueManager { const { scanQueue } = await import("../workers/queues"); const { prisma } = await import("../utils/db"); - console.log(" Starting library scan..."); + logger.debug(" Starting library scan..."); // Get first user for scanning const firstUser = await prisma.user.findFirst(); if (!firstUser) { - console.error(` No users found in database, cannot scan`); + logger.error(` No users found in database, cannot scan`); return false; } @@ -516,10 +518,10 @@ class DownloadQueueManager { source: "download-queue", }); - console.log("Library scan queued"); + logger.debug("Library scan queued"); return true; } catch (error: any) { - console.error("Lidify sync trigger error:", error.message); + logger.error("Lidify sync trigger error:", error.message); return false; } } @@ -546,7 +548,7 @@ class DownloadQueueManager { * Manually trigger a full refresh (for testing or manual triggers) */ async manualRefresh() { - console.log("\n Manual refresh triggered...\n"); + logger.debug("\n Manual refresh triggered...\n"); await this.triggerFullRefresh(); } @@ -561,7 +563,7 @@ class DownloadQueueManager { for (const [downloadId, info] of this.activeDownloads) { const age = now - info.startTime; if (age > this.STALE_TIMEOUT_MS) { - console.log( + logger.debug( `[CLEANUP] Cleaning up stale download: "${ info.albumTitle }" (${downloadId}) - age: ${Math.round( @@ -574,7 +576,7 @@ 
class DownloadQueueManager { } if (cleanedCount > 0) { - console.log( + logger.debug( `[CLEANUP] Cleaned up ${cleanedCount} stale download(s)` ); } @@ -582,6 +584,71 @@ class DownloadQueueManager { return cleanedCount; } + /** + * Reconcile in-memory state with database on startup + * - Mark stale jobs (>30 min without update) as failed + * - Load active/processing jobs into memory + */ + async reconcileOnStartup(): Promise<{ loaded: number; failed: number }> { + const { prisma } = await import("../utils/db"); + + const staleThreshold = new Date(Date.now() - this.STALE_TIMEOUT_MS); + + // Mark stale processing jobs as failed + const staleResult = await prisma.downloadJob.updateMany({ + where: { + status: "processing", + startedAt: { lt: staleThreshold } + }, + data: { + status: "failed", + error: "Server restart - download was processing but never completed" + } + }); + + logger.debug(`[DOWNLOAD] Marked ${staleResult.count} stale downloads as failed`); + + // Load recent processing jobs into memory (not stale) + const activeJobs = await prisma.downloadJob.findMany({ + where: { + status: "processing", + startedAt: { gte: staleThreshold } + }, + select: { + id: true, + subject: true, + targetMbid: true, + lidarrRef: true, + metadata: true, + startedAt: true, + attempts: true + } + }); + + // Populate in-memory map from database + for (const job of activeJobs) { + const metadata = job.metadata as Record || {}; + this.activeDownloads.set(job.lidarrRef || job.id, { + downloadId: job.lidarrRef || job.id, + albumTitle: job.subject, + albumMbid: job.targetMbid, + artistName: metadata.artistName || "Unknown", + artistMbid: metadata.artistMbid, + albumId: metadata.lidarrAlbumId, + artistId: metadata.lidarrArtistId, + attempts: job.attempts, + startTime: job.startedAt?.getTime() || Date.now(), + userId: metadata.userId, + tier: metadata.tier, + similarity: metadata.similarity + }); + } + + logger.debug(`[DOWNLOAD] Loaded ${activeJobs.length} active downloads from database`); 
+ + return { loaded: activeJobs.length, failed: staleResult.count }; + } + /** * Shutdown the download queue manager (cleanup resources) */ @@ -592,14 +659,14 @@ class DownloadQueueManager { } this.clearTimeout(); this.activeDownloads.clear(); - console.log("Download queue manager shutdown"); + logger.debug("Download queue manager shutdown"); } /** * Link Lidarr download IDs to download jobs (so we can mark them completed later) */ private async linkDownloadJob(downloadId: string, albumMbid: string) { - console.log( + logger.debug( ` [LINK] Attempting to link download job for MBID: ${albumMbid}` ); try { @@ -615,7 +682,7 @@ class DownloadQueueManager { targetMbid: true, }, }); - console.log( + logger.debug( ` [LINK] Found ${existingJobs.length} job(s) with this MBID:`, JSON.stringify(existingJobs, null, 2) ); @@ -629,27 +696,28 @@ class DownloadQueueManager { data: { lidarrRef: downloadId, status: "processing", + startedAt: new Date(), }, }); if (result.count === 0) { - console.log( + logger.debug( ` No matching download jobs found to link with Lidarr ID ${downloadId}` ); - console.log( + logger.debug( ` This means either: no job exists, job already has lidarrRef, or status is not pending/processing` ); } else { - console.log( + logger.debug( ` Linked Lidarr download ${downloadId} to ${result.count} download job(s)` ); } } catch (error: any) { - console.error( + logger.error( ` Failed to persist Lidarr download link:`, error.message ); - console.error(` Error details:`, error); + logger.error(` Error details:`, error); } } } diff --git a/backend/src/services/enrichment.ts b/backend/src/services/enrichment.ts index fc92c9e..c9f4ed4 100644 --- a/backend/src/services/enrichment.ts +++ b/backend/src/services/enrichment.ts @@ -14,6 +14,7 @@ * - Manual override support */ +import { logger } from "../utils/logger"; import { prisma } from "../utils/db"; import { lastFmService } from "./lastfm"; import { musicBrainzService } from "./musicbrainz"; @@ -171,7 +172,7 @@ export 
class EnrichmentService { throw new Error(`Artist ${artistId} not found`); } - console.log(`Enriching artist: ${artist.name}`); + logger.debug(`Enriching artist: ${artist.name}`); const enrichmentData: ArtistEnrichmentData = { confidence: 0, @@ -190,10 +191,10 @@ export class EnrichmentService { if (mbResults.length > 0) { enrichmentData.mbid = mbResults[0].id; enrichmentData.confidence += 0.4; - console.log(` Found MBID: ${enrichmentData.mbid}`); + logger.debug(` Found MBID: ${enrichmentData.mbid}`); } } catch (error) { - console.error(` ✗ MusicBrainz lookup failed:`, error); + logger.error(` MusicBrainz lookup failed:`, error); } } @@ -214,7 +215,7 @@ export class EnrichmentService { lastfmInfo.tags?.tag?.map((t: any) => t.name) || []; enrichmentData.genres = enrichmentData.tags?.slice(0, 3); // Top 3 tags as genres enrichmentData.confidence += 0.3; - console.log( + logger.debug( ` Found Last.fm data: ${ enrichmentData.tags?.length || 0 } tags` @@ -228,10 +229,10 @@ export class EnrichmentService { enrichmentData.similarArtists = similar.map( (a: any) => a.name ); - console.log(` Found ${similar.length} similar artists`); + logger.debug(` Found ${similar.length} similar artists`); } } catch (error) { - console.error( + logger.error( ` ✗ Last.fm lookup failed:`, error instanceof Error ? error.message : error ); @@ -251,16 +252,16 @@ export class EnrichmentService { if (imageResult) { enrichmentData.heroUrl = imageResult.url; enrichmentData.confidence += 0.2; - console.log(` Found artist image from ${imageResult.source}`); + logger.debug(` Found artist image from ${imageResult.source}`); } } catch (error) { - console.error( + logger.error( ` ✗ Artist image lookup failed:`, error instanceof Error ? 
error.message : error ); } - console.log( + logger.debug( ` Enrichment confidence: ${( enrichmentData.confidence * 100 ).toFixed(0)}%` @@ -294,7 +295,7 @@ export class EnrichmentService { throw new Error(`Album ${albumId} not found`); } - console.log( + logger.debug( `[Enrichment] Processing album: ${album.artist.name} - ${album.title}` ); @@ -335,7 +336,7 @@ export class EnrichmentService { ? new Date(match["first-release-date"]) : undefined; enrichmentData.confidence += 0.5; - console.log(` Found MBID: ${enrichmentData.rgMbid}`); + logger.debug(` Found MBID: ${enrichmentData.rgMbid}`); // Try to get label info from first release try { @@ -355,18 +356,18 @@ export class EnrichmentService { ) { enrichmentData.label = releaseInfo["label-info"][0].label.name; - console.log( + logger.debug( ` Found label: ${enrichmentData.label}` ); } } } catch (error) { - console.log(`Could not fetch label info`); + logger.debug(`Could not fetch label info`); } } } } catch (error) { - console.error(` ✗ MusicBrainz lookup failed:`, error); + logger.error(` MusicBrainz lookup failed:`, error); } } @@ -375,8 +376,7 @@ export class EnrichmentService { try { const lastfmInfo = await lastFmService.getAlbumInfo( album.artist.name, - album.title, - enrichmentData.rgMbid + album.title ); if (lastfmInfo) { @@ -386,14 +386,14 @@ export class EnrichmentService { enrichmentData.trackCount = lastfmInfo.tracks?.track?.length; enrichmentData.confidence += 0.3; - console.log( + logger.debug( ` Found Last.fm data: ${ enrichmentData.tags?.length || 0 } tags` ); } } catch (error) { - console.error(` ✗ Last.fm lookup failed:`, error); + logger.error(` Last.fm lookup failed:`, error); } } @@ -408,16 +408,16 @@ export class EnrichmentService { if (coverResult) { enrichmentData.coverUrl = coverResult.url; enrichmentData.confidence += 0.2; - console.log(` Found cover art from ${coverResult.source}`); + logger.debug(` Found cover art from ${coverResult.source}`); } } catch (error) { - console.error( + 
logger.error( ` ✗ Cover art lookup failed:`, error instanceof Error ? error.message : error ); } - console.log( + logger.debug( ` Enrichment confidence: ${( enrichmentData.confidence * 100 ).toFixed(0)}%` @@ -443,7 +443,7 @@ export class EnrichmentService { }); if (existingArtist && existingArtist.id !== artistId) { - console.log( + logger.debug( `MBID ${data.mbid} already used by "${existingArtist.name}", skipping MBID update` ); } else { @@ -462,7 +462,7 @@ export class EnrichmentService { where: { id: artistId }, data: updateData, }); - console.log( + logger.debug( ` Saved ${data.genres?.length || 0} genres for artist` ); } @@ -480,6 +480,9 @@ export class EnrichmentService { if (data.rgMbid) updateData.rgMbid = data.rgMbid; if (data.coverUrl) updateData.coverUrl = data.coverUrl; if (data.releaseDate) { + // Store original release date in dedicated field + updateData.originalYear = data.releaseDate.getFullYear(); + // Also update year for backward compatibility (but originalYear takes precedence) updateData.year = data.releaseDate.getFullYear(); } if (data.label) updateData.label = data.label; @@ -492,7 +495,7 @@ export class EnrichmentService { where: { id: albumId }, data: updateData, }); - console.log( + logger.debug( ` Saved album data: ${ data.genres?.length || 0 } genres, label: ${data.label || "none"}` @@ -565,7 +568,7 @@ export class EnrichmentService { }, }); - console.log(`Starting enrichment for ${artists.length} artists...`); + logger.debug(`Starting enrichment for ${artists.length} artists...`); for (const artist of artists) { try { @@ -634,7 +637,7 @@ export class EnrichmentService { item: `${artist.name} - ${album.title}`, error: error.message, }); - console.error( + logger.error( ` ✗ Failed to enrich ${artist.name} - ${album.title}:`, error ); @@ -649,11 +652,11 @@ export class EnrichmentService { item: artist.name, error: error.message, }); - console.error(` ✗ Failed to enrich ${artist.name}:`, error); + logger.error(` Failed to enrich 
${artist.name}:`, error); } } - console.log( + logger.debug( `Enrichment complete: ${result.itemsEnriched}/${result.itemsProcessed} items enriched` ); diff --git a/backend/src/services/enrichmentFailureService.ts b/backend/src/services/enrichmentFailureService.ts new file mode 100644 index 0000000..7900e3a --- /dev/null +++ b/backend/src/services/enrichmentFailureService.ts @@ -0,0 +1,354 @@ +/** + * Enrichment Failure Service + * + * Tracks and manages failures during artist/track/audio enrichment. + * Provides visibility into what failed and allows selective retry. + */ + +import { logger } from "../utils/logger"; +import { prisma } from "../utils/db"; + +export interface EnrichmentFailure { + id: string; + entityType: "artist" | "track" | "audio"; + entityId: string; + entityName: string | null; + errorMessage: string | null; + errorCode: string | null; + retryCount: number; + maxRetries: number; + firstFailedAt: Date; + lastFailedAt: Date; + skipped: boolean; + skippedAt: Date | null; + resolved: boolean; + resolvedAt: Date | null; + metadata: any; +} + +export interface RecordFailureInput { + entityType: "artist" | "track" | "audio"; + entityId: string; + entityName?: string; + errorMessage: string; + errorCode?: string; + metadata?: any; +} + +export interface GetFailuresOptions { + entityType?: "artist" | "track" | "audio"; + includeSkipped?: boolean; + includeResolved?: boolean; + limit?: number; + offset?: number; +} + +class EnrichmentFailureService { + /** + * Record a failure (or increment retry count if already exists) + */ + async recordFailure(input: RecordFailureInput): Promise { + const { + entityType, + entityId, + entityName, + errorMessage, + errorCode, + metadata, + } = input; + + // Try to find existing failure + const existing = await prisma.enrichmentFailure.findUnique({ + where: { + entityType_entityId: { + entityType, + entityId, + }, + }, + }); + + if (existing) { + // Update existing failure - cap retry count at maxRetries to prevent 
unbounded increment + const newRetryCount = Math.min( + existing.retryCount + 1, + existing.maxRetries + ); + + return await prisma.enrichmentFailure.update({ + where: { id: existing.id }, + data: { + errorMessage, + errorCode, + retryCount: newRetryCount, + lastFailedAt: new Date(), + metadata: metadata + ? JSON.parse(JSON.stringify(metadata)) + : existing.metadata, + }, + }) as EnrichmentFailure; + } else { + // Create new failure + return await prisma.enrichmentFailure.create({ + data: { + entityType, + entityId, + entityName, + errorMessage, + errorCode, + retryCount: 1, + maxRetries: 3, + metadata: metadata + ? JSON.parse(JSON.stringify(metadata)) + : null, + }, + }) as EnrichmentFailure; + } + } + + /** + * Get failures with filtering and pagination + */ + async getFailures(options: GetFailuresOptions = {}): Promise<{ + failures: EnrichmentFailure[]; + total: number; + }> { + const { + entityType, + includeSkipped = false, + includeResolved = false, + limit = 100, + offset = 0, + } = options; + + const where: any = {}; + + if (entityType) { + where.entityType = entityType; + } + + if (!includeSkipped) { + where.skipped = false; + } + + if (!includeResolved) { + where.resolved = false; + } + + const [failures, total] = await Promise.all([ + prisma.enrichmentFailure.findMany({ + where, + orderBy: { lastFailedAt: "desc" }, + take: limit, + skip: offset, + }), + prisma.enrichmentFailure.count({ where }), + ]); + + return { failures: failures as unknown as EnrichmentFailure[], total }; + } + + /** + * Get failure counts by type + */ + async getFailureCounts(): Promise<{ + artist: number; + track: number; + audio: number; + total: number; + }> { + const [artistCount, trackCount, audioCount] = await Promise.all([ + prisma.enrichmentFailure.count({ + where: { + entityType: "artist", + resolved: false, + skipped: false, + }, + }), + prisma.enrichmentFailure.count({ + where: { entityType: "track", resolved: false, skipped: false }, + }), + 
prisma.enrichmentFailure.count({ + where: { entityType: "audio", resolved: false, skipped: false }, + }), + ]); + + return { + artist: artistCount, + track: trackCount, + audio: audioCount, + total: artistCount + trackCount + audioCount, + }; + } + + /** + * Get a single failure by ID + */ + async getFailure(id: string): Promise { + return await prisma.enrichmentFailure.findUnique({ + where: { id }, + }) as unknown as EnrichmentFailure | null; + } + + /** + * Mark failures as skipped (won't be retried automatically) + */ + async skipFailures(ids: string[]): Promise { + const result = await prisma.enrichmentFailure.updateMany({ + where: { id: { in: ids } }, + data: { + skipped: true, + skippedAt: new Date(), + }, + }); + + return result.count; + } + + /** + * Mark failures as resolved (manually fixed) + */ + async resolveFailures(ids: string[]): Promise { + const result = await prisma.enrichmentFailure.updateMany({ + where: { id: { in: ids } }, + data: { + resolved: true, + resolvedAt: new Date(), + }, + }); + + return result.count; + } + + /** + * Reset retry count for failures (prepare for retry) + */ + async resetRetryCount(ids: string[]): Promise { + const result = await prisma.enrichmentFailure.updateMany({ + where: { id: { in: ids } }, + data: { + retryCount: 0, + }, + }); + + return result.count; + } + + /** + * Delete failures (cleanup resolved/old failures) + */ + async deleteFailures(ids: string[]): Promise { + const result = await prisma.enrichmentFailure.deleteMany({ + where: { id: { in: ids } }, + }); + + return result.count; + } + + /** + * Cleanup old resolved failures (older than specified days) + */ + async cleanupOldResolved(olderThanDays: number = 30): Promise { + const cutoffDate = new Date(); + cutoffDate.setDate(cutoffDate.getDate() - olderThanDays); + + const result = await prisma.enrichmentFailure.deleteMany({ + where: { + resolved: true, + resolvedAt: { + lt: cutoffDate, + }, + }, + }); + + logger.debug( + `[Enrichment Failures] Cleaned up 
${result.count} old resolved failures` + ); + return result.count; + } + + /** + * Check if an entity has failed too many times + */ + async hasExceededRetries( + entityType: string, + entityId: string + ): Promise { + const failure = await prisma.enrichmentFailure.findUnique({ + where: { + entityType_entityId: { + entityType: entityType as any, + entityId, + }, + }, + }); + + if (!failure) return false; + return failure.retryCount >= failure.maxRetries; + } + + /** + * Clear failure record (reset for fresh retry) + */ + async clearFailure(entityType: string, entityId: string): Promise { + await prisma.enrichmentFailure.deleteMany({ + where: { + entityType: entityType as any, + entityId, + }, + }); + } + + /** + * Clean up failures for entities that no longer exist in the database. + * This resolves orphaned failure records where the track/artist was deleted. + */ + async cleanupOrphanedFailures(): Promise<{ + cleaned: number; + checked: number; + }> { + // Get all unresolved failures + const failures = await prisma.enrichmentFailure.findMany({ + where: { resolved: false, skipped: false }, + select: { id: true, entityType: true, entityId: true }, + }); + + const toResolve: string[] = []; + + for (const failure of failures) { + let exists = false; + + if (failure.entityType === "artist") { + const artist = await prisma.artist.findUnique({ + where: { id: failure.entityId }, + select: { id: true }, + }); + exists = !!artist; + } else if ( + failure.entityType === "track" || + failure.entityType === "audio" + ) { + const track = await prisma.track.findUnique({ + where: { id: failure.entityId }, + select: { id: true }, + }); + exists = !!track; + } + + if (!exists) { + toResolve.push(failure.id); + } + } + + if (toResolve.length > 0) { + await this.resolveFailures(toResolve); + logger.debug( + `[Enrichment Failures] Cleaned up ${toResolve.length} orphaned failures` + ); + } + + return { cleaned: toResolve.length, checked: failures.length }; + } +} + +// Singleton 
instance +export const enrichmentFailureService = new EnrichmentFailureService(); diff --git a/backend/src/services/enrichmentState.ts b/backend/src/services/enrichmentState.ts new file mode 100644 index 0000000..000bde3 --- /dev/null +++ b/backend/src/services/enrichmentState.ts @@ -0,0 +1,267 @@ +/** + * Enrichment State Management Service + * + * Manages the state of enrichment processes using Redis for cross-process coordination. + * Allows pause/resume/stop controls and tracks current progress. + */ + +import { logger } from "../utils/logger"; +import Redis from "ioredis"; +import { config } from "../config"; + +const ENRICHMENT_STATE_KEY = "enrichment:state"; +const ENRICHMENT_CONTROL_CHANNEL = "enrichment:control"; +const AUDIO_CONTROL_CHANNEL = "audio:analysis:control"; + +export type EnrichmentStatus = "idle" | "running" | "paused" | "stopping"; +export type EnrichmentPhase = "artists" | "tracks" | "audio" | null; + +export interface EnrichmentState { + status: EnrichmentStatus; + startedAt?: string; + pausedAt?: string; + stoppedAt?: string; + currentPhase: EnrichmentPhase; + lastActivity: string; + completionNotificationSent?: boolean; // Prevent repeated completion notifications + stoppingInfo?: { + phase: string; + currentItem: string; + itemsRemaining: number; + }; + + // Progress tracking + artists: { + total: number; + completed: number; + failed: number; + current?: string; // Currently processing artist name + }; + tracks: { + total: number; + completed: number; + failed: number; + current?: string; // Currently processing track + }; + audio: { + total: number; + completed: number; + failed: number; + processing: number; // Currently in worker pool + }; +} + +class EnrichmentStateService { + private redis: Redis; + private publisher: Redis; + + constructor() { + this.redis = new Redis(config.redisUrl); + this.publisher = new Redis(config.redisUrl); + } + + /** + * Get current enrichment state + */ + async getState(): Promise<EnrichmentState | null> { + const data = await
this.redis.get(ENRICHMENT_STATE_KEY); + if (!data) { + return null; + } + return JSON.parse(data); + } + + /** + * Initialize enrichment state + */ + async initializeState(): Promise<EnrichmentState> { + const state: EnrichmentState = { + status: "running", + startedAt: new Date().toISOString(), + currentPhase: "artists", + lastActivity: new Date().toISOString(), + completionNotificationSent: false, // Reset notification flag on new enrichment + artists: { total: 0, completed: 0, failed: 0 }, + tracks: { total: 0, completed: 0, failed: 0 }, + audio: { total: 0, completed: 0, failed: 0, processing: 0 }, + }; + + await this.setState(state); + return state; + } + + /** + * Update enrichment state + */ + async setState(state: EnrichmentState): Promise<void> { + state.lastActivity = new Date().toISOString(); + await this.redis.set(ENRICHMENT_STATE_KEY, JSON.stringify(state)); + } + + /** + * Update specific fields in state + * Auto-initializes state if it doesn't exist + */ + async updateState( + updates: Partial<EnrichmentState> + ): Promise<EnrichmentState> { + let current = await this.getState(); + + // Auto-initialize if state doesn't exist + if (!current) { + logger.debug("[Enrichment State] State not found, initializing..."); + current = await this.initializeState(); + } + + const updated = { ...current, ...updates }; + await this.setState(updated); + return updated; + } + + /** + * Pause enrichment process + */ + async pause(): Promise<EnrichmentState> { + const state = await this.getState(); + if (!state) { + throw new Error("No active enrichment to pause"); + } + + if (state.status !== "running") { + throw new Error(`Cannot pause enrichment in ${state.status} state`); + } + + const updated = await this.updateState({ + status: "paused", + pausedAt: new Date().toISOString(), + }); + + // Notify workers via pub/sub + await this.publisher.publish(ENRICHMENT_CONTROL_CHANNEL, "pause"); + await this.publisher.publish(AUDIO_CONTROL_CHANNEL, "pause"); + + logger.debug("[Enrichment State] Paused"); + return updated; + } + + /** + * Resume
enrichment process + */ + async resume(): Promise<EnrichmentState> { + const state = await this.getState(); + if (!state) { + throw new Error("No enrichment state to resume"); + } + + // Idempotent: If already running, return success + if (state.status === "running") { + logger.debug("[Enrichment State] Already running"); + return state; + } + + if (state.status !== "paused") { + throw new Error( + `Cannot resume enrichment in ${state.status} state` + ); + } + + const updated = await this.updateState({ + status: "running", + pausedAt: undefined, + }); + + // Notify workers via pub/sub + await this.publisher.publish(ENRICHMENT_CONTROL_CHANNEL, "resume"); + await this.publisher.publish(AUDIO_CONTROL_CHANNEL, "resume"); + + logger.debug("[Enrichment State] Resumed"); + return updated; + } + + /** + * Stop enrichment process + */ + async stop(): Promise<EnrichmentState> { + const state = await this.getState(); + if (!state) { + throw new Error("No active enrichment to stop"); + } + + // Idempotent: If already idle, return success + if (state.status === "idle") { + logger.debug("[Enrichment State] Already stopped (idle)"); + return state; + } + + const updated = await this.updateState({ + status: "stopping", + stoppedAt: new Date().toISOString(), + }); + + // Notify workers via pub/sub + await this.publisher.publish(ENRICHMENT_CONTROL_CHANNEL, "stop"); + await this.publisher.publish(AUDIO_CONTROL_CHANNEL, "stop"); + + logger.debug("[Enrichment State] Stopping..."); + + // Transition to idle after a delay (workers will clean up) + setTimeout(async () => { + await this.updateState({ status: "idle", currentPhase: null }); + logger.debug("[Enrichment State] Stopped and idle"); + }, 5000); + + return updated; + } + + /** + * Clear enrichment state (set to idle) + */ + async clear(): Promise<void> { + await this.redis.del(ENRICHMENT_STATE_KEY); + logger.debug("[Enrichment State] Cleared"); + } + + /** + * Check if enrichment is currently running + */ + async isRunning(): Promise<boolean> { + const state = await
this.getState(); + return state?.status === "running"; + } + + /** + * Check if enrichment is paused + */ + async isPaused(): Promise<boolean> { + const state = await this.getState(); + return state?.status === "paused"; + } + + /** + * Check for hung processes (no activity for > 15 minutes) + */ + async detectHang(): Promise<boolean> { + const state = await this.getState(); + if (!state || state.status !== "running") { + return false; + } + + const lastActivity = new Date(state.lastActivity); + const now = new Date(); + const minutesSinceActivity = + (now.getTime() - lastActivity.getTime()) / (1000 * 60); + + return minutesSinceActivity > 15; + } + + /** + * Cleanup connections + */ + async disconnect(): Promise<void> { + await this.redis.quit(); + await this.publisher.quit(); + } +} + +// Singleton instance +export const enrichmentStateService = new EnrichmentStateService(); diff --git a/backend/src/services/fanart.ts b/backend/src/services/fanart.ts index e57fb74..baec31a 100644 --- a/backend/src/services/fanart.ts +++ b/backend/src/services/fanart.ts @@ -1,4 +1,5 @@ import axios, { AxiosInstance } from "axios"; +import { logger } from "../utils/logger"; import { redisClient } from "../utils/redis"; import { getSystemSettings } from "../utils/systemSettings"; @@ -38,7 +39,7 @@ class FanartService { const settings = await getSystemSettings(); if (settings?.fanartEnabled && settings?.fanartApiKey) { this.apiKey = settings.fanartApiKey; - console.log("Fanart.tv configured from database"); + logger.debug("Fanart.tv configured from database"); this.initialized = true; return; } @@ -49,7 +50,7 @@ class FanartService { // Fallback to .env if (process.env.FANART_API_KEY) { this.apiKey = process.env.FANART_API_KEY; - console.log("Fanart.tv configured from .env"); + logger.debug("Fanart.tv configured from .env"); } // Note: Not logging "not configured" here - it's optional and logs are spammy this.initialized = true; @@ -73,7 +74,7 @@ class FanartService { if (redisClient.isOpen) { const cached
= await redisClient.get(cacheKey); if (cached) { - console.log(` Fanart.tv: Using cached image`); + logger.debug(` Fanart.tv: Using cached image`); return cached; } } @@ -82,7 +83,7 @@ class FanartService { } try { - console.log(` Fetching from Fanart.tv...`); + logger.debug(` Fetching from Fanart.tv...`); const response = await this.client.get(`/music/${mbid}`, { params: { api_key: this.apiKey }, }); @@ -98,39 +99,39 @@ class FanartService { // If it's just a filename, construct the full URL if (rawUrl && !rawUrl.startsWith("http")) { rawUrl = `https://assets.fanart.tv/fanart/music/${mbid}/artistbackground/${rawUrl}`; - console.log( + logger.debug( ` Fanart.tv: Constructed full URL from filename` ); } imageUrl = rawUrl; - console.log(` Fanart.tv: Found artist background`); + logger.debug(` Fanart.tv: Found artist background`); } else if (data.artistthumb && data.artistthumb.length > 0) { let rawUrl = data.artistthumb[0].url; // If it's just a filename, construct the full URL if (rawUrl && !rawUrl.startsWith("http")) { rawUrl = `https://assets.fanart.tv/fanart/music/${mbid}/artistthumb/${rawUrl}`; - console.log( + logger.debug( ` Fanart.tv: Constructed full URL from filename` ); } imageUrl = rawUrl; - console.log(` Fanart.tv: Found artist thumbnail`); + logger.debug(` Fanart.tv: Found artist thumbnail`); } else if (data.hdmusiclogo && data.hdmusiclogo.length > 0) { let rawUrl = data.hdmusiclogo[0].url; // If it's just a filename, construct the full URL if (rawUrl && !rawUrl.startsWith("http")) { rawUrl = `https://assets.fanart.tv/fanart/music/${mbid}/hdmusiclogo/${rawUrl}`; - console.log( + logger.debug( ` Fanart.tv: Constructed full URL from filename` ); } imageUrl = rawUrl; - console.log(` Fanart.tv: Found HD logo`); + logger.debug(` Fanart.tv: Found HD logo`); } // Cache for 7 days @@ -149,9 +150,9 @@ class FanartService { return imageUrl; } catch (error: any) { if (error.response?.status === 404) { - console.log(`Fanart.tv: No images found`); + 
logger.debug(`Fanart.tv: No images found`); } else { - console.error(` Fanart.tv error:`, error.message); + logger.error(` Fanart.tv error:`, error.message); } return null; } diff --git a/backend/src/services/fileValidator.ts b/backend/src/services/fileValidator.ts index f65fff5..03d38cf 100644 --- a/backend/src/services/fileValidator.ts +++ b/backend/src/services/fileValidator.ts @@ -1,4 +1,5 @@ import * as fs from "fs"; +import { logger } from "../utils/logger"; import * as path from "path"; import { prisma } from "../utils/db"; import { config } from "../config"; @@ -26,7 +27,7 @@ export class FileValidatorService { duration: 0, }; - console.log("[FileValidator] Starting library validation..."); + logger.debug("[FileValidator] Starting library validation..."); // Get all tracks from the database const tracks = await prisma.track.findMany({ @@ -37,7 +38,7 @@ export class FileValidatorService { }, }); - console.log( + logger.debug( `[FileValidator] Found ${tracks.length} tracks to validate` ); @@ -53,7 +54,7 @@ export class FileValidatorService { // Prevent path traversal attacks if (!absolutePath.startsWith(path.normalize(config.music.musicPath))) { - console.warn( + logger.warn( `[FileValidator] Path traversal attempt detected: ${track.filePath}` ); missingTrackIds.push(track.id); @@ -64,7 +65,7 @@ export class FileValidatorService { const exists = await this.fileExists(absolutePath); if (!exists) { - console.log( + logger.debug( `[FileValidator] Missing file: ${track.filePath} (${track.title})` ); missingTrackIds.push(track.id); @@ -74,12 +75,12 @@ export class FileValidatorService { // Log progress every 100 tracks if (result.tracksChecked % 100 === 0) { - console.log( + logger.debug( `[FileValidator] Progress: ${result.tracksChecked}/${tracks.length} tracks checked, ${missingTrackIds.length} missing` ); } } catch (err: any) { - console.error( + logger.error( `[FileValidator] Error checking ${track.filePath}:`, err.message ); @@ -93,7 +94,7 @@ export class 
FileValidatorService { // Remove missing tracks from database if (missingTrackIds.length > 0) { - console.log( + logger.debug( `[FileValidator] Removing ${missingTrackIds.length} missing tracks from database...` ); @@ -108,7 +109,7 @@ export class FileValidatorService { result.duration = Date.now() - startTime; - console.log( + logger.debug( `[FileValidator] Validation complete: ${result.tracksChecked} checked, ${result.tracksRemoved} removed (${result.duration}ms)` ); @@ -150,7 +151,7 @@ export class FileValidatorService { // Prevent path traversal attacks if (!absolutePath.startsWith(path.normalize(config.music.musicPath))) { - console.warn( + logger.warn( `[FileValidator] Path traversal attempt detected: ${track.filePath}` ); return false; @@ -159,7 +160,7 @@ export class FileValidatorService { const exists = await this.fileExists(absolutePath); if (!exists) { - console.log( + logger.debug( `[FileValidator] Track file missing, removing from DB: ${track.title}` ); await prisma.track.delete({ diff --git a/backend/src/services/imageProvider.ts b/backend/src/services/imageProvider.ts index 0256c44..e92ac02 100644 --- a/backend/src/services/imageProvider.ts +++ b/backend/src/services/imageProvider.ts @@ -8,6 +8,7 @@ * 4. Last.fm (fallback, often missing) */ +import { logger } from "../utils/logger"; import axios from "axios"; export interface ImageSearchOptions { @@ -36,7 +37,7 @@ export class ImageProviderService { ): Promise { const { timeout = 5000 } = options; - console.log(`[IMAGE] Searching for artist image: ${artistName}`); + logger.debug(`[IMAGE] Searching for artist image: ${artistName}`); // Try Deezer first (most reliable) try { @@ -45,11 +46,11 @@ export class ImageProviderService { timeout ); if (deezerImage) { - console.log(` Found image from Deezer`); + logger.debug(` Found image from Deezer`); return deezerImage; } } catch (error) { - console.log( + logger.debug( ` Deezer failed: ${ error instanceof Error ? 
error.message : "Unknown error" }` @@ -64,11 +65,11 @@ export class ImageProviderService { timeout ); if (fanartImage) { - console.log(` Found image from Fanart.tv`); + logger.debug(` Found image from Fanart.tv`); return fanartImage; } } catch (error) { - console.log( + logger.debug( `Fanart.tv failed: ${ error instanceof Error ? error.message : "Unknown error" }` @@ -84,11 +85,11 @@ export class ImageProviderService { timeout ); if (mbImage) { - console.log(` Found image from MusicBrainz`); + logger.debug(` Found image from MusicBrainz`); return mbImage; } } catch (error) { - console.log( + logger.debug( `MusicBrainz failed: ${ error instanceof Error ? error.message : "Unknown error" }` @@ -96,7 +97,7 @@ export class ImageProviderService { } } - console.log(` ✗ No artist image found from any source`); + logger.debug(` No artist image found from any source`); return null; } @@ -111,7 +112,7 @@ export class ImageProviderService { ): Promise { const { timeout = 5000 } = options; - console.log( + logger.debug( `[IMAGE] Searching for album cover: ${artistName} - ${albumTitle}` ); @@ -123,11 +124,11 @@ export class ImageProviderService { timeout ); if (deezerCover) { - console.log(` Found cover from Deezer`); + logger.debug(` Found cover from Deezer`); return deezerCover; } } catch (error) { - console.log( + logger.debug( ` Deezer failed: ${ error instanceof Error ? error.message : "Unknown error" }` @@ -142,11 +143,11 @@ export class ImageProviderService { timeout ); if (mbCover) { - console.log(` Found cover from MusicBrainz`); + logger.debug(` Found cover from MusicBrainz`); return mbCover; } } catch (error) { - console.log( + logger.debug( `MusicBrainz failed: ${ error instanceof Error ? 
error.message : "Unknown error" }` @@ -162,11 +163,11 @@ export class ImageProviderService { timeout ); if (fanartCover) { - console.log(` Found cover from Fanart.tv`); + logger.debug(` Found cover from Fanart.tv`); return fanartCover; } } catch (error) { - console.log( + logger.debug( `Fanart.tv failed: ${ error instanceof Error ? error.message : "Unknown error" }` @@ -174,7 +175,7 @@ export class ImageProviderService { } } - console.log(` ✗ No album cover found from any source`); + logger.debug(` No album cover found from any source`); return null; } @@ -407,7 +408,7 @@ export class ImageProviderService { } } } catch (error) { - console.log( + logger.debug( `Last.fm failed: ${ error instanceof Error ? error.message : "Unknown error" }` diff --git a/backend/src/services/itunes.ts b/backend/src/services/itunes.ts index cf7e935..5f1714f 100644 --- a/backend/src/services/itunes.ts +++ b/backend/src/services/itunes.ts @@ -1,4 +1,5 @@ import axios, { AxiosInstance } from "axios"; +import { logger } from "../utils/logger"; import { redisClient } from "../utils/redis"; interface ItunesPodcast { @@ -51,7 +52,7 @@ class ItunesService { return JSON.parse(cached); } } catch (err) { - console.warn("Redis get error:", err); + logger.warn("Redis get error:", err); } await this.rateLimit(); @@ -60,7 +61,7 @@ class ItunesService { try { await redisClient.setEx(cacheKey, ttlSeconds, JSON.stringify(data)); } catch (err) { - console.warn("Redis set error:", err); + logger.warn("Redis set error:", err); } return data; @@ -234,13 +235,13 @@ class ItunesService { const keywords = this.extractSearchKeywords(title, description, author); if (keywords.length === 0) { - console.log( + logger.debug( "No keywords extracted for similar podcast search, falling back to title" ); return this.searchPodcasts(title, limit); } - console.log( + logger.debug( ` Searching for similar podcasts using keywords: ${keywords.join(", ")}` ); @@ -275,31 +276,31 @@ class ItunesService { genreId: number, limit = 
20 ): Promise { - console.log(`[iTunes SERVICE] getTopPodcastsByGenre called with genre=${genreId}, limit=${limit}`); + logger.debug(`[iTunes SERVICE] getTopPodcastsByGenre called with genre=${genreId}, limit=${limit}`); const cacheKey = `itunes:genre:${genreId}:${limit}`; - console.log(`[iTunes SERVICE] Cache key: ${cacheKey}`); + logger.debug(`[iTunes SERVICE] Cache key: ${cacheKey}`); const result = await this.cachedRequest( cacheKey, async () => { try { - console.log(`[iTunes] Fetching genre ${genreId} from RSS feed...`); + logger.debug(`[iTunes] Fetching genre ${genreId} from RSS feed...`); // Use iTunes RSS feed for top podcasts by genre const response = await this.client.get( `/us/rss/toppodcasts/genre=${genreId}/limit=${limit}/json` ); - console.log(`[iTunes] Response status: ${response.status}`); - console.log(`[iTunes] Has feed data: ${!!response.data?.feed}`); - console.log(`[iTunes] Entries count: ${response.data?.feed?.entry?.length || 0}`); + logger.debug(`[iTunes] Response status: ${response.status}`); + logger.debug(`[iTunes] Has feed data: ${!!response.data?.feed}`); + logger.debug(`[iTunes] Entries count: ${response.data?.feed?.entry?.length || 0}`); const entries = response.data?.feed?.entry || []; // If only one entry, it might not be an array const entriesArray = Array.isArray(entries) ? 
entries : [entries]; - console.log(`[iTunes] Processing ${entriesArray.length} entries`); + logger.debug(`[iTunes] Processing ${entriesArray.length} entries`); // Convert RSS feed format to our podcast format const podcasts = entriesArray.map((entry: any) => { @@ -315,21 +316,21 @@ class ItunesService { primaryGenreName: entry.category?.attributes?.label, collectionViewUrl: entry.link?.attributes?.href, }; - console.log(`[iTunes] Mapped podcast: ${podcast.collectionName} (ID: ${podcast.collectionId})`); + logger.debug(`[iTunes] Mapped podcast: ${podcast.collectionName} (ID: ${podcast.collectionId})`); return podcast; }).filter((p: any) => p.collectionId > 0); // Filter out invalid entries - console.log(`[iTunes] Returning ${podcasts.length} valid podcasts`); + logger.debug(`[iTunes] Returning ${podcasts.length} valid podcasts`); return podcasts; } catch (error) { - console.error(`[iTunes] ERROR in requestFn:`, error); + logger.error(`[iTunes] ERROR in requestFn:`, error); return []; } }, 2592000 // 30 days ); - console.log(`[iTunes SERVICE] cachedRequest returned ${result.length} podcasts`); + logger.debug(`[iTunes SERVICE] cachedRequest returned ${result.length} podcasts`); return result; } } diff --git a/backend/src/services/lastfm.ts b/backend/src/services/lastfm.ts index 4167a20..43950d1 100644 --- a/backend/src/services/lastfm.ts +++ b/backend/src/services/lastfm.ts @@ -1,4 +1,5 @@ import axios, { AxiosInstance } from "axios"; +import { logger } from "../utils/logger"; import * as fuzz from "fuzzball"; import { config } from "../config"; import { redisClient } from "../utils/redis"; @@ -6,6 +7,7 @@ import { getSystemSettings } from "../utils/systemSettings"; import { fanartService } from "./fanart"; import { deezerService } from "./deezer"; import { rateLimiter } from "./rateLimiter"; +import { normalizeToArray } from "../utils/normalize"; interface SimilarArtist { name: string; @@ -39,24 +41,34 @@ class LastFmService { const settings = await 
getSystemSettings(); if (settings?.lastfmApiKey) { this.apiKey = settings.lastfmApiKey; - console.log("Last.fm configured from user settings"); + logger.debug("Last.fm configured from user settings"); } else if (this.apiKey) { - console.log("Last.fm configured (default app key)"); + logger.debug("Last.fm configured (default app key)"); } } catch (err) { // DB not ready yet, use default/env key if (this.apiKey) { - console.log("Last.fm configured (default app key)"); + logger.debug("Last.fm configured (default app key)"); } } if (!this.apiKey) { - console.warn("Last.fm API key not available"); + logger.warn("Last.fm API key not available"); } this.initialized = true; } + /** + * Refresh the API key from current settings + * Called when system settings are updated to pick up new key + */ + async refreshApiKey(): Promise { + this.initialized = false; + await this.ensureInitialized(); + logger.debug("Last.fm API key refreshed from settings"); + } + private async request(params: Record) { await this.ensureInitialized(); const response = await rateLimiter.execute("lastfm", () => @@ -78,7 +90,7 @@ class LastFmService { return JSON.parse(cached); } } catch (err) { - console.warn("Redis get error:", err); + logger.warn("Redis get error:", err); } try { @@ -107,7 +119,7 @@ class LastFmService { JSON.stringify(results) ); } catch (err) { - console.warn("Redis set error:", err); + logger.warn("Redis set error:", err); } return results; @@ -117,13 +129,13 @@ class LastFmService { error.response?.status === 404 || error.response?.data?.error === 6 ) { - console.log( + logger.debug( `Artist MBID not found on Last.fm, trying name search: ${artistName}` ); return this.getSimilarArtistsByName(artistName, limit); } - console.error(`Last.fm error for ${artistName}:`, error); + logger.error(`Last.fm error for ${artistName}:`, error); return []; } } @@ -140,7 +152,7 @@ class LastFmService { return JSON.parse(cached); } } catch (err) { - console.warn("Redis get error:", err); + 
logger.warn("Redis get error:", err); } try { @@ -169,12 +181,12 @@ class LastFmService { JSON.stringify(results) ); } catch (err) { - console.warn("Redis set error:", err); + logger.warn("Redis set error:", err); } return results; } catch (error) { - console.error(`Last.fm error for ${artistName}:`, error); + logger.error(`Last.fm error for ${artistName}:`, error); return []; } } @@ -188,7 +200,7 @@ class LastFmService { return JSON.parse(cached); } } catch (err) { - console.warn("Redis get error:", err); + logger.warn("Redis get error:", err); } try { @@ -202,20 +214,38 @@ class LastFmService { const album = data.album; - // Cache for 30 days - try { - await redisClient.setEx( - cacheKey, - 2592000, - JSON.stringify(album) - ); - } catch (err) { - console.warn("Redis set error:", err); + // Normalize arrays before caching/returning + if (album) { + const normalized = { + ...album, + image: normalizeToArray(album.image), + tags: album.tags ? { + ...album.tags, + tag: normalizeToArray(album.tags.tag) + } : album.tags, + tracks: album.tracks ? 
{ + ...album.tracks, + track: normalizeToArray(album.tracks.track) + } : album.tracks + }; + + // Cache for 30 days + try { + await redisClient.setEx( + cacheKey, + 2592000, + JSON.stringify(normalized) + ); + } catch (err) { + logger.warn("Redis set error:", err); + } + + return normalized; } return album; } catch (error) { - console.error(`Last.fm album info error for ${albumName}:`, error); + logger.error(`Last.fm album info error for ${albumName}:`, error); return null; } } @@ -229,7 +259,7 @@ class LastFmService { return JSON.parse(cached); } } catch (err) { - console.warn("Redis get error:", err); + logger.warn("Redis get error:", err); } try { @@ -251,12 +281,12 @@ class LastFmService { JSON.stringify(albums) ); } catch (err) { - console.warn("Redis set error:", err); + logger.warn("Redis set error:", err); } return albums; } catch (error) { - console.error(`Last.fm tag albums error for ${tag}:`, error); + logger.error(`Last.fm tag albums error for ${tag}:`, error); return []; } } @@ -270,7 +300,7 @@ class LastFmService { return JSON.parse(cached); } } catch (err) { - console.warn("Redis get error:", err); + logger.warn("Redis get error:", err); } try { @@ -293,12 +323,12 @@ class LastFmService { JSON.stringify(tracks) ); } catch (err) { - console.warn("Redis set error:", err); + logger.warn("Redis set error:", err); } return tracks; } catch (error) { - console.error( + logger.error( `Last.fm similar tracks error for ${trackName}:`, error ); @@ -319,7 +349,7 @@ class LastFmService { return JSON.parse(cached); } } catch (err) { - console.warn("Redis get error:", err); + logger.warn("Redis get error:", err); } try { @@ -348,12 +378,12 @@ class LastFmService { JSON.stringify(tracks) ); } catch (err) { - console.warn("Redis set error:", err); + logger.warn("Redis set error:", err); } return tracks; } catch (error) { - console.error(`Last.fm top tracks error for ${artistName}:`, error); + logger.error(`Last.fm top tracks error for ${artistName}:`, error); return 
[]; } } @@ -371,7 +401,7 @@ class LastFmService { return JSON.parse(cached); } } catch (err) { - console.warn("Redis get error:", err); + logger.warn("Redis get error:", err); } try { @@ -400,12 +430,12 @@ class LastFmService { JSON.stringify(albums) ); } catch (err) { - console.warn("Redis set error:", err); + logger.warn("Redis set error:", err); } return albums; } catch (error) { - console.error(`Last.fm top albums error for ${artistName}:`, error); + logger.error(`Last.fm top albums error for ${artistName}:`, error); return []; } } @@ -428,9 +458,27 @@ class LastFmService { } const data = await this.request(params); - return data.artist; + const artist = data.artist; + + // Normalize arrays before returning + if (artist) { + return { + ...artist, + image: normalizeToArray(artist.image), + tags: artist.tags ? { + ...artist.tags, + tag: normalizeToArray(artist.tags.tag) + } : artist.tags, + similar: artist.similar ? { + ...artist.similar, + artist: normalizeToArray(artist.similar.artist) + } : artist.similar + }; + } + + return artist; } catch (error) { - console.error( + logger.error( `Last.fm artist info error for ${artistName}:`, error ); @@ -538,7 +586,7 @@ class LastFmService { name: artist.name, listeners: parseInt(artist.listeners || "0", 10), url: artist.url, - image: this.getBestImage(artist.image), + image: this.getBestImage(normalizeToArray(artist.image)), mbid: artist.mbid, bio: null, tags: [] as string[], @@ -587,7 +635,7 @@ class LastFmService { album: track.album || null, listeners: parseInt(track.listeners || "0", 10), url: track.url, - image: this.getBestImage(track.image), + image: this.getBestImage(normalizeToArray(track.image)), mbid: track.mbid, }; @@ -633,7 +681,7 @@ class LastFmService { const artists = data.results?.artistmatches?.artist || []; - console.log( + logger.debug( `\n [LAST.FM SEARCH] Found ${artists.length} artists (before filtering)` ); @@ -675,11 +723,11 @@ class LastFmService { wordMatches, }; }) - .filter(({ similarity, 
wordMatches }) => { + .filter(({ similarity, wordMatches }: { similarity: number; wordMatches: number }) => { if (!queryLower) return true; return similarity >= 50 || wordMatches >= minWordMatches; }) - .sort((a, b) => { + .sort((a: any, b: any) => { return ( Number(b.hasMbid) - Number(a.hasMbid) || b.wordMatches - a.wordMatches || @@ -728,7 +776,7 @@ class LastFmService { uniqueArtists.push(candidate); } } catch (error) { - console.warn( + logger.warn( "[LAST.FM SEARCH] Similar artist fallback failed:", error ); @@ -737,7 +785,7 @@ class LastFmService { const limitedArtists = uniqueArtists.slice(0, limit); - console.log( + logger.debug( ` → Filtered to ${limitedArtists.length} relevant matches (limit: ${limit})` ); @@ -761,7 +809,7 @@ class LastFmService { return [...enriched, ...fast].filter(Boolean); } catch (error) { - console.error("Last.fm artist search error:", error); + logger.error("Last.fm artist search error:", error); return []; } } @@ -781,7 +829,7 @@ class LastFmService { const tracks = data.results?.trackmatches?.track || []; - console.log( + logger.debug( `\n [LAST.FM TRACK SEARCH] Found ${tracks.length} tracks` ); @@ -811,7 +859,7 @@ class LastFmService { return [...enriched, ...fast].filter(Boolean); } catch (error) { - console.error("Last.fm track search error:", error); + logger.error("Last.fm track search error:", error); return []; } } @@ -829,13 +877,96 @@ class LastFmService { format: "json", }); - return data.track; + const track = data.track; + + // Normalize arrays before returning + if (track) { + return { + ...track, + toptags: track.toptags ? { + ...track.toptags, + tag: normalizeToArray(track.toptags.tag) + } : track.toptags, + album: track.album ? 
{ + ...track.album, + image: normalizeToArray(track.album.image) + } : track.album + }; + } + + return track; } catch (error) { // Don't log errors for track info (many tracks don't have full info) return null; } } + /** + * Get the canonical artist name from Last.fm correction API + * Resolves aliases and misspellings to official artist names + * + * @param artistName - The artist name to check for corrections + * @returns The canonical artist name info, or null if no correction found + * + * @example + * getArtistCorrection("of mice") // Returns { corrected: true, canonicalName: "Of Mice & Men", mbid: "..." } + * getArtistCorrection("bjork") // Returns { corrected: true, canonicalName: "Björk", mbid: "..." } + */ + async getArtistCorrection(artistName: string): Promise<{ + corrected: boolean; + canonicalName: string; + mbid?: string; + } | null> { + const cacheKey = `lastfm:correction:${artistName.toLowerCase().trim()}`; + + // Check cache first (30-day TTL) + try { + const cached = await redisClient.get(cacheKey); + if (cached) { + return cached === "null" ? 
null : JSON.parse(cached); + } + } catch (err) { + logger.warn("Redis get error:", err); + } + + try { + const data = await this.request({ + method: "artist.getCorrection", + artist: artistName, + api_key: this.apiKey, + format: "json", + }); + + const correction = data.corrections?.correction?.artist; + + if (!correction || !correction.name) { + // Cache null result + await redisClient.setEx(cacheKey, 2592000, "null"); + return null; + } + + const result = { + corrected: + correction.name.toLowerCase() !== artistName.toLowerCase(), + canonicalName: correction.name, + mbid: correction.mbid || undefined, + }; + + // Cache for 30 days + await redisClient.setEx(cacheKey, 2592000, JSON.stringify(result)); + + return result; + } catch (error: any) { + // Error 6 = "Artist not found" - cache negative result + if (error.response?.data?.error === 6) { + await redisClient.setEx(cacheKey, 2592000, "null"); + return null; + } + logger.error(`Last.fm correction error for ${artistName}:`, error); + return null; + } + } + /** * Get popular artists from Last.fm charts */ @@ -844,7 +975,7 @@ class LastFmService { // Return empty if no API key configured if (!this.apiKey) { - console.warn( + logger.warn( "Last.fm: Cannot fetch chart artists - no API key configured" ); return []; @@ -858,7 +989,7 @@ class LastFmService { return JSON.parse(cached); } } catch (err) { - console.warn("Redis get error:", err); + logger.warn("Redis get error:", err); } try { @@ -901,7 +1032,7 @@ class LastFmService { // Last fallback to Last.fm images (but filter placeholders) if (!image) { - const lastFmImage = this.getBestImage(artist.image); + const lastFmImage = this.getBestImage(normalizeToArray(artist.image)); if ( lastFmImage && !lastFmImage.includes( @@ -933,12 +1064,12 @@ class LastFmService { JSON.stringify(detailedArtists) ); } catch (err) { - console.warn("Redis set error:", err); + logger.warn("Redis set error:", err); } return detailedArtists; } catch (error) { - console.error("Last.fm chart 
artists error:", error); + logger.error("Last.fm chart artists error:", error); return []; } } diff --git a/backend/src/services/lidarr.ts b/backend/src/services/lidarr.ts index ce9334f..6479960 100644 --- a/backend/src/services/lidarr.ts +++ b/backend/src/services/lidarr.ts @@ -1,13 +1,77 @@ import axios, { AxiosInstance } from "axios"; +import { logger } from "../utils/logger"; import { config } from "../config"; import { getSystemSettings } from "../utils/systemSettings"; +// ============================================ +// STRUCTURED ERROR TYPES +// ============================================ + +/** + * Error types for music acquisition failures + * Used to determine fallback strategies + */ +export enum AcquisitionErrorType { + ARTIST_NOT_FOUND = "ARTIST_NOT_FOUND", + ALBUM_NOT_FOUND = "ALBUM_NOT_FOUND", + NO_INDEXER_RESULTS = "NO_INDEXER_RESULTS", + NO_RELEASES_AVAILABLE = "NO_RELEASES_AVAILABLE", + INDEXER_TIMEOUT = "INDEXER_TIMEOUT", + METADATA_ERROR = "METADATA_ERROR", + NETWORK_ERROR = "NETWORK_ERROR", + UNKNOWN = "UNKNOWN", +} + +/** + * Structured error class for acquisition failures + * Includes error type and recoverability flag for fallback logic + */ +export class AcquisitionError extends Error { + public readonly type: AcquisitionErrorType; + public readonly isRecoverable: boolean; + public readonly originalError?: Error; + + constructor( + message: string, + type: AcquisitionErrorType, + isRecoverable: boolean = true, + originalError?: Error + ) { + super(message); + this.name = "AcquisitionError"; + this.type = type; + this.isRecoverable = isRecoverable; + this.originalError = originalError; + + // Maintain proper stack trace + if (Error.captureStackTrace) { + Error.captureStackTrace(this, AcquisitionError); + } + } +} + interface LidarrArtist { id: number; artistName: string; foreignArtistId: string; // MusicBrainz ID monitored: boolean; tags?: number[]; // Tag IDs + artistType?: string; + qualityProfileId?: number; + metadataProfileId?: 
number; + rootFolderPath?: string; + statistics?: { + albumCount?: number; + trackFileCount?: number; + trackCount?: number; + totalTrackCount?: number; + sizeOnDisk?: number; + percentOfTracks?: number; + }; + ratings?: { + votes?: number; + value?: number; + }; } interface LidarrTag { @@ -62,7 +126,7 @@ class LidarrService { const apiKey = settings.lidarrApiKey || config.lidarr?.apiKey; if (url && apiKey) { - console.log("Lidarr configured from database"); + logger.debug("Lidarr configured from database"); this.client = axios.create({ baseURL: url, timeout: 30000, @@ -72,19 +136,19 @@ class LidarrService { }); this.enabled = true; } else { - console.warn(" Lidarr enabled but missing URL or API key"); + logger.warn(" Lidarr enabled but missing URL or API key"); this.enabled = false; } } else if (config.lidarr) { // Fallback to .env - console.log("Lidarr configured from .env"); + logger.debug("Lidarr configured from .env"); this.enabled = true; } else { - console.log(" Lidarr not enabled"); + logger.debug(" Lidarr not enabled"); this.enabled = false; } } catch (error) { - console.error("Failed to load Lidarr settings:", error); + logger.error("Failed to load Lidarr settings:", error); // Keep .env config if database fails } @@ -112,7 +176,7 @@ class LidarrService { const rootFolders = response.data; if (rootFolders.length === 0) { - console.warn(" No root folders configured in Lidarr!"); + logger.warn(" No root folders configured in Lidarr!"); return requestedPath; } @@ -127,11 +191,11 @@ class LidarrService { // Fallback to first available root folder const fallback = rootFolders[0].path; - console.log(` Root folder "${requestedPath}" not found in Lidarr`); - console.log(` Using fallback: "${fallback}"`); + logger.debug(` Root folder "${requestedPath}" not found in Lidarr`); + logger.debug(` Using fallback: "${fallback}"`); return fallback; } catch (error) { - console.error("Error checking root folders:", error); + logger.error("Error checking root folders:", 
error); return requestedPath; // Return requested path and let Lidarr error if needed } } @@ -142,6 +206,11 @@ class LidarrService { ): Promise { await this.ensureInitialized(); + // DEBUG: Log exact parameters received + logger.debug( + `[LIDARR_SEARCH_ARTIST] artistName="${artistName}", mbid="${mbid}"` + ); + if (!this.enabled || !this.client) { throw new Error("Lidarr not enabled"); } @@ -161,16 +230,24 @@ class LidarrService { // FALLBACK: Lidarr's metadata server may be having issues // If we have an MBID, create a minimal artist object from our own MusicBrainz data if (mbid) { - console.log(` [FALLBACK] Lidarr lookup failed, using direct MusicBrainz data for MBID: ${mbid}`); - + logger.debug( + ` [FALLBACK] Lidarr lookup failed, using direct MusicBrainz data for MBID: ${mbid}` + ); + try { // Import MusicBrainz service dynamically to avoid circular deps - const { musicBrainzService } = await import("./musicbrainz"); - + const { musicBrainzService } = await import( + "./musicbrainz" + ); + // Get artist info from MusicBrainz directly - const mbArtists = await musicBrainzService.searchArtist(artistName, 5); - const mbArtist = mbArtists?.find(a => a.id === mbid) || mbArtists?.[0]; - + const mbArtists = await musicBrainzService.searchArtist( + artistName, + 5 + ); + const mbArtist = + mbArtists?.find((a: any) => a.id === mbid) || mbArtists?.[0]; + if (mbArtist) { // Create a minimal Lidarr-compatible artist object const fallbackArtist: LidarrArtist = { @@ -183,29 +260,42 @@ class LidarrService { metadataProfileId: 1, rootFolderPath: "/music", tags: [], - statistics: { albumCount: 0 } + statistics: { albumCount: 0 }, }; - - console.log(` [FALLBACK] Created artist from MusicBrainz: ${fallbackArtist.artistName}`); + + logger.debug( + ` [FALLBACK] Created artist from MusicBrainz: ${fallbackArtist.artistName}` + ); return [fallbackArtist]; } } catch (mbError: any) { - console.error(` [FALLBACK] MusicBrainz lookup also failed:`, mbError.message); + logger.error( + ` 
[FALLBACK] MusicBrainz lookup also failed:`, + mbError.message + ); } } return response.data || []; } catch (error) { - console.error("Lidarr artist search error:", error); - + logger.error("Lidarr artist search error:", error); + // FALLBACK on error too if (mbid) { - console.log(` [FALLBACK] Lidarr error, trying MusicBrainz for MBID: ${mbid}`); + logger.debug( + ` [FALLBACK] Lidarr error, trying MusicBrainz for MBID: ${mbid}` + ); try { - const { musicBrainzService } = await import("./musicbrainz"); - const mbArtists = await musicBrainzService.searchArtist(artistName, 5); - const mbArtist = mbArtists?.find(a => a.id === mbid) || mbArtists?.[0]; - + const { musicBrainzService } = await import( + "./musicbrainz" + ); + const mbArtists = await musicBrainzService.searchArtist( + artistName, + 5 + ); + const mbArtist = + mbArtists?.find((a: any) => a.id === mbid) || mbArtists?.[0]; + if (mbArtist) { const fallbackArtist: LidarrArtist = { id: 0, @@ -217,16 +307,21 @@ class LidarrService { metadataProfileId: 1, rootFolderPath: "/music", tags: [], - statistics: { albumCount: 0 } + statistics: { albumCount: 0 }, }; - console.log(` [FALLBACK] Created artist from MusicBrainz: ${fallbackArtist.artistName}`); + logger.debug( + ` [FALLBACK] Created artist from MusicBrainz: ${fallbackArtist.artistName}` + ); return [fallbackArtist]; } } catch (mbError: any) { - console.error(` [FALLBACK] MusicBrainz also failed:`, mbError.message); + logger.error( + ` [FALLBACK] MusicBrainz also failed:`, + mbError.message + ); } } - + return []; } } @@ -241,6 +336,11 @@ class LidarrService { ): Promise { await this.ensureInitialized(); + // DEBUG: Log exact parameters received + logger.debug( + `[LIDARR_ADD_ARTIST] artistName="${artistName}", mbid="${mbid}"` + ); + if (!this.enabled || !this.client) { throw new Error("Lidarr not enabled"); } @@ -250,7 +350,9 @@ class LidarrService { if (isDiscovery) { discoveryTagId = await this.getOrCreateDiscoveryTag(); if (discoveryTagId) { - 
console.log(`[LIDARR] Will apply discovery tag (ID: ${discoveryTagId}) to artist`); + logger.debug( + `[LIDARR] Will apply discovery tag (ID: ${discoveryTagId}) to artist` + ); } } @@ -260,22 +362,22 @@ class LidarrService { rootFolderPath ); - console.log( + logger.debug( ` Searching Lidarr for artist: "${artistName}"${ mbid ? ` (MBID: ${mbid})` : " (no MBID - using name search)" }` ); - console.log(` Root folder: ${validRootFolder}`); + logger.debug(` Root folder: ${validRootFolder}`); // Search for artist (by MBID if available, otherwise by name) const searchResults = await this.searchArtist(artistName, mbid); if (searchResults.length === 0) { - console.error(` Artist not found in Lidarr: ${artistName}`); + logger.error(` Artist not found in Lidarr: ${artistName}`); return null; } - console.log(` Found ${searchResults.length} results from Lidarr`); + logger.debug(` Found ${searchResults.length} results from Lidarr`); let artistData: LidarrArtist; @@ -286,10 +388,10 @@ class LidarrService { ); if (!exactMatch) { - console.error( + logger.error( ` No exact MBID match found for: ${artistName} (${mbid})` ); - console.log( + logger.debug( " Available results:", searchResults.map((a) => ({ name: a.artistName, @@ -306,7 +408,7 @@ class LidarrService { exactMatch.artistType === "Group" && (exactMatch.statistics?.albumCount || 0) === 0 ) { - console.log( + logger.debug( ` Exact MBID match is a Group with 0 albums - checking for better match...` ); @@ -322,7 +424,7 @@ class LidarrService { ); if (betterMatch) { - console.log( + logger.debug( ` Found better match: "${ betterMatch.artistName }" (Type: ${betterMatch.artistType}, Albums: ${ @@ -331,13 +433,13 @@ class LidarrService { ); artistData = betterMatch; } else { - console.log( + logger.debug( ` No better match found, using Group entry` ); artistData = exactMatch; } } else { - console.log( + logger.debug( `Exact match found: "${exactMatch.artistName}" (Type: ${ exactMatch.artistType }, Albums: 
${exactMatch.statistics?.albumCount || 0})` @@ -346,7 +448,7 @@ class LidarrService { } } else { // FALLBACK: No MBID - Use smart filtering for best match - console.log(" No MBID available - using smart selection..."); + logger.debug(" No MBID available - using smart selection..."); // Filter and score results const scoredResults = searchResults.map((artist) => { @@ -399,9 +501,9 @@ class LidarrService { scoredResults.sort((a, b) => b.score - a.score); // Log candidates for debugging - console.log(" Candidates:"); + logger.debug(" Candidates:"); scoredResults.slice(0, 3).forEach((item, i) => { - console.log( + logger.debug( ` ${i + 1}. "${item.artist.artistName}" - Type: ${ item.artist.artistType || "Unknown" } - Albums: ${ @@ -422,20 +524,22 @@ class LidarrService { ); if (exists) { - console.log(`Artist already in Lidarr: ${artistName}`); + logger.debug(`Artist already in Lidarr: ${artistName}`); // If this is a discovery add and artist doesn't have discovery tag, add it if (isDiscovery && discoveryTagId) { const existingTags = exists.tags || []; if (!existingTags.includes(discoveryTagId)) { - console.log(` Adding discovery tag to existing artist...`); + logger.debug( + ` Adding discovery tag to existing artist...` + ); await this.addTagsToArtist(exists.id, [discoveryTagId]); } } // If monitorAllAlbums is true, update the artist to monitor all albums if (monitorAllAlbums) { - console.log(` Updating artist to monitor all albums...`); + logger.debug(` Updating artist to monitor all albums...`); try { // Update artist settings const updated = await this.client.put( @@ -453,7 +557,7 @@ class LidarrService { ); const albums = albumsResponse.data; - console.log( + logger.debug( ` Found ${albums.length} albums to monitor` ); @@ -472,7 +576,7 @@ class LidarrService { // Trigger search for all albums if requested if (searchForMissingAlbums && albums.length > 0) { - console.log( + logger.debug( ` Triggering search for ${albums.length} albums...` ); await 
this.client.post("/api/v1/command", { @@ -481,12 +585,12 @@ class LidarrService { }); } - console.log( + logger.debug( ` Updated existing artist and monitored all albums` ); return updated.data; } catch (error: any) { - console.error( + logger.error( ` Failed to update artist:`, error.message ); @@ -518,26 +622,35 @@ class LidarrService { artistPayload.tags = [discoveryTagId]; } - const response = await this.client.post("/api/v1/artist", artistPayload); + const response = await this.client.post( + "/api/v1/artist", + artistPayload + ); + + logger.debug( + `Added artist to Lidarr: ${artistName}${ + isDiscovery ? " (tagged as discovery)" : "" + }` + ); - console.log(`Added artist to Lidarr: ${artistName}${isDiscovery ? " (tagged as discovery)" : ""}`); - // Trigger metadata refresh to ensure album catalog is populated if (!searchForMissingAlbums) { - console.log(` Triggering metadata refresh for new artist...`); + logger.debug(` Triggering metadata refresh for new artist...`); try { await this.client.post("/api/v1/command", { name: "RefreshArtist", artistId: response.data.id, }); } catch (refreshError) { - console.warn(` Metadata refresh command failed (non-blocking)`); + logger.warn( + ` Metadata refresh command failed (non-blocking)` + ); } } - + return response.data; } catch (error: any) { - console.error( + logger.error( "Lidarr add artist error:", error.response?.data || error.message ); @@ -560,7 +673,7 @@ class LidarrService { const searchTerm = rgMbid ? 
`lidarr:${rgMbid}` : `${artistName} ${albumTitle}`; - console.log(` Searching Lidarr for album: ${searchTerm}`); + logger.debug(` Searching Lidarr for album: ${searchTerm}`); const response = await this.client.get("/api/v1/album/lookup", { params: { @@ -568,24 +681,50 @@ class LidarrService { }, }); - console.log(` Found ${response.data.length} album result(s)`); + logger.debug(` Found ${response.data.length} album result(s)`); return response.data; } catch (error: any) { - console.error(` ✗ Lidarr album search error: ${error.message}`); + logger.error(`Lidarr album search error: ${error.message}`); if (error.response?.data) { - console.error(` Response:`, error.response.data); + logger.error(` Response:`, error.response.data); } return []; } } + /** + * Extract base album title by removing edition markers + * E.g., "Abbey Road (Remastered)" → "Abbey Road" + */ + private extractBaseTitle(title: string): string { + return ( + title + // Remove parenthetical edition markers + .replace( + /\s*\([^)]*(?:remaster|deluxe|expanded|anniversary|bonus|special|limited|collector|platinum|japan|uk|us|import|super deluxe)[^)]*\)\s*/gi, + "" + ) + // Remove bracketed edition markers + .replace( + /\s*\[[^\]]*(?:remaster|deluxe|expanded|anniversary|bonus|special|limited|collector|platinum|japan|uk|us|import|super deluxe)[^\]]*\]\s*/gi, + "" + ) + // Remove trailing edition markers with dash + .replace( + /\s*[-–—]\s*(remaster|deluxe|expanded|anniversary|bonus|special|limited|collector|platinum|japan|uk|us|import|super deluxe).*$/gi, + "" + ) + .trim() + ); + } + /** * Get all albums for an artist that exist in Lidarr's catalog * Used for same-artist fallback to avoid trying MusicBrainz albums that Lidarr can't find */ async getArtistAlbums(artistMbid: string): Promise { if (!this.client) { - console.warn("Lidarr not enabled"); + logger.warn("Lidarr not enabled"); return []; } @@ -597,19 +736,58 @@ class LidarrService { ); if (!artist) { - console.log(` Artist not found in Lidarr: 
${artistMbid}`); + logger.debug(` Artist not found in Lidarr: ${artistMbid}`); return []; } // Get albums for this artist - const albumsResponse = await this.client.get(`/api/v1/album?artistId=${artist.id}`); + const albumsResponse = await this.client.get( + `/api/v1/album?artistId=${artist.id}` + ); return albumsResponse.data || []; } catch (error: any) { - console.error(` Failed to get artist albums: ${error.message}`); + logger.error(` Failed to get artist albums: ${error.message}`); return []; } } + /** + * Wait for a Lidarr command to complete + * @param commandId The command ID to poll + * @param timeoutMs Maximum time to wait (default: 30s) + * @param pollIntervalMs Time between polls (default: 2s) + * @returns The completed command status + */ + private async waitForCommand( + commandId: number, + timeoutMs: number = 30000, + pollIntervalMs: number = 2000 + ): Promise<{ status: string; message: string }> { + const startTime = Date.now(); + + while (Date.now() - startTime < timeoutMs) { + const response = await this.client!.get( + `/api/v1/command/${commandId}` + ); + const { status, message, body } = response.data; + + // Check if command finished (completed, failed, aborted) + if (status !== "started" && status !== "queued") { + logger.debug( + ` Command ${commandId} completed with status: ${status}` + ); + return { + status, + message: message || body?.records?.[0]?.message || "", + }; + } + + await new Promise((resolve) => setTimeout(resolve, pollIntervalMs)); + } + + throw new Error(`Command ${commandId} timed out after ${timeoutMs}ms`); + } + async addAlbum( rgMbid: string, artistName: string, @@ -625,9 +803,13 @@ class LidarrService { } try { - console.log(` Adding album: ${albumTitle} by ${artistName}${isDiscovery ? " (discovery)" : ""}`); - console.log(` Album MBID: ${rgMbid}`); - console.log(` Artist MBID: ${artistMbid || "none"}`); + logger.debug( + ` Adding album: ${albumTitle} by ${artistName}${ + isDiscovery ? 
" (discovery)" : "" + }` + ); + logger.debug(` Album MBID: ${rgMbid}`); + logger.debug(` Artist MBID: ${artistMbid || "none"}`); // NEW APPROACH: Add artist first, then find album in their catalog // This avoids the broken external album search API @@ -647,14 +829,16 @@ class LidarrService { if (discoveryTagId) { const existingTags = artist.tags || []; if (!existingTags.includes(discoveryTagId)) { - console.log(` Adding discovery tag to existing artist...`); + logger.debug( + ` Adding discovery tag to existing artist...` + ); await this.addTagsToArtist(artist.id, [discoveryTagId]); } } } if (!artist && artistMbid) { - console.log(` Adding artist first: ${artistName}`); + logger.debug(` Adding artist first: ${artistName}`); // Add artist WITHOUT searching for all albums // Pass isDiscovery to tag the artist appropriately @@ -668,64 +852,76 @@ class LidarrService { ); if (!artist) { - console.error(` ✗ Failed to add artist`); + logger.error(` Failed to add artist`); return null; } justAddedArtist = true; - console.log( + logger.debug( ` Artist added: ${artist.artistName} (ID: ${artist.id})` ); - console.log( + logger.debug( ` Waiting for Lidarr to populate album catalog...` ); } else if (!artist) { - console.error(` ✗ Artist not found and no MBID provided`); + logger.error(` Artist not found and no MBID provided`); return null; } else { - console.log( + logger.debug( ` Artist already exists: ${artist.artistName} (ID: ${artist.id})` ); } // Get artist's albums from Lidarr let artistAlbums: LidarrAlbum[] = []; - + // First check - get current album list const artistAlbumsResponse = await this.client.get( `/api/v1/album?artistId=${artist.id}` ); artistAlbums = artistAlbumsResponse.data; - + // If we just added the artist and no albums yet, wait for metadata to populate if (artistAlbums.length === 0 && justAddedArtist) { - console.log(` Waiting for Lidarr to fetch album metadata...`); + logger.debug(` Waiting for Lidarr to fetch album metadata...`); - // Increased 
timeout: 15 attempts * 3 seconds = 45 seconds total + // Increased timeout: 20 attempts * 3 seconds = 60 seconds total // Large artist catalogs (e.g., prolific bands) need more time - const maxAttempts = 15; + const maxAttempts = 20; const retryDelay = 3000; // 3 seconds between retries - + for (let attempt = 1; attempt <= maxAttempts; attempt++) { - await new Promise((resolve) => setTimeout(resolve, retryDelay)); - + await new Promise((resolve) => + setTimeout(resolve, retryDelay) + ); + const retryResponse = await this.client.get( `/api/v1/album?artistId=${artist.id}` ); artistAlbums = retryResponse.data; - + if (artistAlbums.length > 0) { - console.log(` Albums loaded after ${attempt * 3}s`); + logger.debug(` Albums loaded after ${attempt * 3}s`); break; } - + if (attempt < maxAttempts) { - console.log(` Attempt ${attempt}/${maxAttempts}: Still waiting...`); + logger.debug( + ` Attempt ${attempt}/${maxAttempts}: Still waiting...` + ); + } else { + logger.warn( + ` Timeout reached after ${ + maxAttempts * 3 + }s - artist catalog may still be populating` + ); } } } else if (artistAlbums.length === 0 && !justAddedArtist) { // Artist exists but has 0 albums - try refreshing metadata once - console.log(` Artist exists but has 0 albums - refreshing metadata...`); + logger.debug( + ` Artist exists but has 0 albums - refreshing metadata...` + ); try { await this.client.post("/api/v1/command", { name: "RefreshArtist", @@ -733,17 +929,17 @@ class LidarrService { }); // Wait for refresh to complete await new Promise((resolve) => setTimeout(resolve, 5000)); - + const retryResponse = await this.client.get( `/api/v1/album?artistId=${artist.id}` ); artistAlbums = retryResponse.data; } catch (refreshError) { - console.warn(` Metadata refresh failed`); + logger.warn(` Metadata refresh failed`); } } - console.log( + logger.debug( ` Found ${artistAlbums.length} albums for ${artist.artistName}` ); @@ -756,7 +952,7 @@ class LidarrService { // IMPORTANT: We removed loose matching 
(base name, first word) because it caused // wrong albums to be downloaded (e.g., "A Trip To The Mystery Planet" matching "A Funk Odyssey") if (!albumData) { - console.log( + logger.debug( ` Album MBID not found, trying STRICT name match for: ${albumTitle}` ); @@ -766,20 +962,25 @@ class LidarrService { .toLowerCase() .replace(/\(.*?\)/g, "") // Remove parenthetical content (deluxe edition, remaster, etc.) .replace(/\[.*?\]/g, "") // Remove bracketed content - .replace(/[-–—]\s*(deluxe|remaster|bonus|special|anniversary|expanded|limited|collector).*$/i, "") // Remove edition suffixes + .replace( + /[-–—]\s*(deluxe|remaster|bonus|special|anniversary|expanded|limited|collector).*$/i, + "" + ) // Remove edition suffixes .replace(/[^\w\s]/g, "") // Remove remaining punctuation .replace(/\s+/g, " ") // Normalize whitespace .trim(); const targetTitle = normalizeTitle(albumTitle); - console.log(` Normalized target: "${targetTitle}"`); + logger.debug(` Normalized target: "${targetTitle}"`); // Try exact normalized match first albumData = artistAlbums.find( (a: LidarrAlbum) => normalizeTitle(a.title) === targetTitle ); if (albumData) { - console.log(` ✓ Matched exact normalized: "${albumData.title}"`); + logger.debug( + ` Matched exact normalized: "${albumData.title}"` + ); } // Try partial match ONLY if one contains the other completely @@ -789,15 +990,26 @@ class LidarrService { const normalized = normalizeTitle(a.title); // Only match if one is a substring of the other AND they share significant content // The shorter one must be at least 60% of the longer one's length - const shorter = normalized.length < targetTitle.length ? normalized : targetTitle; - const longer = normalized.length >= targetTitle.length ? normalized : targetTitle; - if (longer.includes(shorter) && shorter.length >= longer.length * 0.6) { + const shorter = + normalized.length < targetTitle.length + ? normalized + : targetTitle; + const longer = + normalized.length >= targetTitle.length + ? 
normalized + : targetTitle; + if ( + longer.includes(shorter) && + shorter.length >= longer.length * 0.6 + ) { return true; } return false; }); if (albumData) { - console.log(` ✓ Matched partial (contained): "${albumData.title}"`); + logger.debug( + ` Matched partial (contained): "${albumData.title}"` + ); } } @@ -807,64 +1019,86 @@ class LidarrService { // and let the discovery system find a different album if (albumData) { - console.log( + logger.debug( ` Final match: "${albumData.title}" (MBID: ${albumData.foreignAlbumId})` ); } else { - console.log(` ✗ No strict match found - will NOT use loose matching to avoid wrong albums`); + logger.debug( + ` No strict match found - will NOT use loose matching to avoid wrong albums` + ); } } if (!albumData) { - console.error( + logger.error( ` ✗ Album "${albumTitle}" not found in artist's ${artistAlbums.length} albums` ); if (artistAlbums.length > 0) { - console.log(` Looking for: "${albumTitle}" (MBID: ${rgMbid})`); - console.log(` Available albums in Lidarr (showing up to 10):`); + logger.debug( + ` Looking for: "${albumTitle}" (MBID: ${rgMbid})` + ); + logger.debug( + ` Available albums in Lidarr (showing up to 10):` + ); artistAlbums.slice(0, 10).forEach((a: LidarrAlbum) => { - console.log(` - "${a.title}" (${a.foreignAlbumId})`); + logger.debug( + ` - "${a.title}" (${a.foreignAlbumId})` + ); }); } - // Return null - let the caller handle replacement logic - // We should NOT download a random album that isn't what was requested - return null; + // Throw structured error - allows fallback to Soulseek + throw new AcquisitionError( + `Album "${albumTitle}" not found in Lidarr catalog for ${artistName}`, + AcquisitionErrorType.ALBUM_NOT_FOUND, + true // isRecoverable - Soulseek can try + ); } - console.log(` Found album in catalog: ${albumData.title} (ID: ${albumData.id})`); + logger.debug( + ` Found album in catalog: ${albumData.title} (ID: ${albumData.id})` + ); // Ensure artist is monitored (might have been added with 
monitoring disabled) if (!artist.monitored) { - console.log(` Enabling artist monitoring...`); + logger.debug(` Enabling artist monitoring...`); await this.client.put(`/api/v1/artist/${artist.id}`, { ...artist, monitored: true, }); - console.log(` Artist monitoring enabled`); + logger.debug(` Artist monitoring enabled`); } else { - console.log(` Artist already monitored`); + logger.debug(` Artist already monitored`); } // CRITICAL: Fetch the FULL album data from Lidarr // The album list endpoint may return incomplete data - console.log(` Fetching full album data from Lidarr...`); - const fullAlbumResponse = await this.client.get(`/api/v1/album/${albumData.id}`); + logger.debug(` Fetching full album data from Lidarr...`); + const fullAlbumResponse = await this.client.get( + `/api/v1/album/${albumData.id}` + ); const fullAlbumData = fullAlbumResponse.data; - - console.log(` Full album data retrieved:`, JSON.stringify({ - id: fullAlbumData.id, - title: fullAlbumData.title, - monitored: fullAlbumData.monitored, - foreignAlbumId: fullAlbumData.foreignAlbumId, - anyReleaseOk: fullAlbumData.anyReleaseOk, - profileId: fullAlbumData.profileId, - releases: fullAlbumData.releases?.length || 0, - }, null, 2)); + + logger.debug( + ` Full album data retrieved:`, + JSON.stringify( + { + id: fullAlbumData.id, + title: fullAlbumData.title, + monitored: fullAlbumData.monitored, + foreignAlbumId: fullAlbumData.foreignAlbumId, + anyReleaseOk: fullAlbumData.anyReleaseOk, + profileId: fullAlbumData.profileId, + releases: fullAlbumData.releases?.length || 0, + }, + null, + 2 + ) + ); // ALWAYS monitor and search for the album, even if already monitored // This ensures Lidarr picks up the request // Preserve user's anyReleaseOk setting - we'll only change it if search fails later - console.log(` Setting album monitoring to true...`); + logger.debug(` Setting album monitoring to true...`); const updateResponse = await this.client.put( `/api/v1/album/${fullAlbumData.id}`, @@ -874,27 
+1108,84 @@ class LidarrService { } ); - console.log(` PUT response monitored: ${updateResponse.data.monitored}`); - + logger.debug( + ` PUT response monitored: ${updateResponse.data.monitored}` + ); + // CRITICAL: Re-fetch the album to verify the change actually persisted - const verifyResponse = await this.client.get(`/api/v1/album/${fullAlbumData.id}`); + const verifyResponse = await this.client.get( + `/api/v1/album/${fullAlbumData.id}` + ); const verifiedMonitored = verifyResponse.data.monitored; - - console.log(` Album monitoring VERIFIED after re-fetch: ${verifiedMonitored}`); - + + logger.debug( + ` Album monitoring VERIFIED after re-fetch: ${verifiedMonitored}` + ); + if (!verifiedMonitored) { - console.error(` ✗ CRITICAL: Album monitoring failed to persist!`); - console.error(` Full album data we sent:`, JSON.stringify(fullAlbumData, null, 2).slice(0, 500)); - console.error(` Response from GET after PUT:`, JSON.stringify(verifyResponse.data, null, 2).slice(0, 500)); + logger.error(` CRITICAL: Album monitoring failed to persist!`); + logger.error( + ` Full album data we sent:`, + JSON.stringify(fullAlbumData, null, 2).slice(0, 500) + ); + logger.error( + ` Response from GET after PUT:`, + JSON.stringify(verifyResponse.data, null, 2).slice(0, 500) + ); } // Use the verified album data const updatedAlbum = verifyResponse.data; + // ============================================================ + // PHASE 2.1: Proactive anyReleaseOk for edition variants + // ============================================================ + const editionPatterns = [ + /\(remaster/i, + /\(deluxe/i, + /\(expanded/i, + /\(anniversary/i, + /\(bonus/i, + /\(special/i, + /\(limited/i, + /\(collector/i, + /\(super deluxe/i, + /\(platinum/i, + /\(japan/i, + /\(uk/i, + /\(us/i, + /\(import/i, + /\[remaster/i, + /\[deluxe/i, + ]; + const isEditionVariant = editionPatterns.some((p) => + p.test(albumTitle) + ); + const foundAlbumIsEdition = editionPatterns.some((p) => + 
p.test(updatedAlbum.title || "") + ); + const needsAnyReleaseOk = isEditionVariant || foundAlbumIsEdition; + + if (needsAnyReleaseOk && !updatedAlbum.anyReleaseOk) { + logger.debug( + ` Edition variant detected ("${albumTitle}") - enabling anyReleaseOk proactively` + ); + + await this.client.put(`/api/v1/album/${updatedAlbum.id}`, { + ...updatedAlbum, + anyReleaseOk: true, + }); + + updatedAlbum.anyReleaseOk = true; + logger.debug( + ` anyReleaseOk enabled - Lidarr will accept any release of this album` + ); + } + // Check if album has releases - if not, refresh artist metadata from MusicBrainz const releaseCount = updatedAlbum.releases?.length || 0; if (releaseCount === 0) { - console.warn( + logger.warn( ` Album has 0 releases - refreshing artist metadata from MusicBrainz...` ); @@ -904,7 +1195,7 @@ class LidarrService { artistId: artist.id, }); - console.log(` Waiting for metadata refresh to complete...`); + logger.debug(` Waiting for metadata refresh to complete...`); // Wait for refresh to complete (Lidarr processes this asynchronously) await new Promise((resolve) => setTimeout(resolve, 5000)); @@ -915,82 +1206,255 @@ class LidarrService { const refreshedAlbum = refreshedAlbumResponse.data; const newReleaseCount = refreshedAlbum.releases?.length || 0; - console.log( + logger.debug( ` After refresh: ${newReleaseCount} releases found` ); if (newReleaseCount === 0) { - console.warn(` Still no releases after refresh!`); - console.warn( + logger.warn(` Still no releases after refresh!`); + logger.warn( ` This album may not be properly indexed in MusicBrainz yet.` ); - console.warn(` Download will be attempted but may fail.`); + logger.warn(` Download will be attempted but may fail.`); } } // ALWAYS trigger search to download the album - console.log(` Triggering album search command for album ID ${updatedAlbum.id}...`); + logger.debug( + ` Triggering album search command for album ID ${updatedAlbum.id}...` + ); const searchResponse = await 
this.client.post("/api/v1/command", { name: "AlbumSearch", albumIds: [updatedAlbum.id], }); - console.log( + logger.debug( ` Search command sent (Command ID: ${searchResponse.data.id})` ); - // Wait a moment and check if search found anything - await new Promise((resolve) => setTimeout(resolve, 3000)); - const commandStatus = await this.client.get(`/api/v1/command/${searchResponse.data.id}`); - console.log(` Search result: ${commandStatus.data.message || 'pending'}`); - - if (commandStatus.data.message?.includes('0 reports')) { - // Check if anyReleaseOk is already true - if not, try enabling it - if (!updatedAlbum.anyReleaseOk) { - console.log(` [RETRY] No results with strict matching. Trying with anyReleaseOk=true...`); - - // Re-fetch album to ensure we have latest data - const refetchResponse = await this.client.get(`/api/v1/album/${updatedAlbum.id}`); - const refetchedAlbum = refetchResponse.data; - - // Enable anyReleaseOk - await this.client.put(`/api/v1/album/${updatedAlbum.id}`, { - ...refetchedAlbum, - anyReleaseOk: true, - }); - console.log(` Set anyReleaseOk=true for album`); - - // Retry search - console.log(` Retrying album search...`); - const retryResponse = await this.client.post("/api/v1/command", { - name: "AlbumSearch", - albumIds: [updatedAlbum.id], - }); - - // Wait and check retry result - await new Promise((resolve) => setTimeout(resolve, 3000)); - const retryStatus = await this.client.get(`/api/v1/command/${retryResponse.data.id}`); - console.log(` Retry search result: ${retryStatus.data.message || 'pending'}`); - - if (retryStatus.data.message?.includes('0 reports')) { - console.warn(` [FAIL] Still no releases found even with anyReleaseOk=true.`); - throw new Error("No releases available - indexers found no matching downloads"); - } else { - console.log(` ✓ Found releases after enabling anyReleaseOk`); - } - } else { - console.warn(` [FAIL] No releases grabbed automatically (anyReleaseOk already true).`); - throw new Error("No releases 
available - indexers found no matching downloads"); - } - } + // Wait for search to complete (with 30s timeout) + try { + const result = await this.waitForCommand( + searchResponse.data.id, + 30000 + ); - console.log(` Album download started: ${updatedAlbum.title}`); - return updatedAlbum; + if (result.message?.includes("0 reports")) { + // ============================================================ + // PHASE 2.3: Enhanced diagnostics for 0 reports + // ============================================================ + try { + const albumDetails = await this.client.get( + `/api/v1/album/${updatedAlbum.id}` + ); + const releaseCount = + albumDetails.data.releases?.length || 0; + const anyReleaseOkStatus = + albumDetails.data.anyReleaseOk; + + logger.debug( + ` [DIAGNOSTIC] Initial search returned 0 reports` + ); + logger.debug( + ` [DIAGNOSTIC] Album "${updatedAlbum.title}" has ${releaseCount} releases defined in Lidarr` + ); + logger.debug( + ` [DIAGNOSTIC] anyReleaseOk: ${anyReleaseOkStatus}` + ); + logger.debug( + ` [DIAGNOSTIC] Album MBID: ${updatedAlbum.foreignAlbumId}` + ); + + if (releaseCount === 0) { + logger.warn( + ` [DIAGNOSTIC] ⚠️ Album has 0 releases in Lidarr - metadata may be incomplete` + ); + } + + if (!this._indexerCountLogged) { + try { + const indexers = await this.client.get( + "/api/v1/indexer" + ); + const enabledIndexers = indexers.data.filter( + (i: any) => + i.enableRss || i.enableAutomaticSearch + ); + logger.debug( + ` [DIAGNOSTIC] ${enabledIndexers.length} enabled indexers configured in Lidarr` + ); + + if (enabledIndexers.length === 0) { + logger.error( + ` [DIAGNOSTIC] ❌ No enabled indexers - Lidarr cannot search for releases` + ); + } + this._indexerCountLogged = true; + } catch (indexerError) { + // Ignore indexer check errors + } + } + } catch (diagError) { + // Ignore diagnostic errors + } + + // No sources found - try anyReleaseOk if not already enabled + if (!updatedAlbum.anyReleaseOk) { + logger.debug( + ` No results with strict 
matching. Trying anyReleaseOk=true...` + ); + + // Enable anyReleaseOk and retry + await this.client.put( + `/api/v1/album/${updatedAlbum.id}`, + { + ...updatedAlbum, + anyReleaseOk: true, + } + ); + + const retryResponse = await this.client.post( + "/api/v1/command", + { + name: "AlbumSearch", + albumIds: [updatedAlbum.id], + } + ); + + const retryResult = await this.waitForCommand( + retryResponse.data.id, + 30000 + ); + + if (retryResult.message?.includes("0 reports")) { + // ============================================================ + // PHASE 2.2: Fallback to base album title + // ============================================================ + const baseAlbumTitle = this.extractBaseTitle(albumTitle); + + if (baseAlbumTitle !== albumTitle && baseAlbumTitle.length > 2) { + logger.debug( + ` Trying base album title fallback: "${albumTitle}" → "${baseAlbumTitle}"` + ); + + const normalizeForMatch = (s: string) => + s.toLowerCase().replace(/[^\w\s]/g, "").trim(); + const normalizedBase = normalizeForMatch(baseAlbumTitle); + + const baseMatch = artistAlbums.find((a: LidarrAlbum) => { + const normalizedAlbumTitle = normalizeForMatch(a.title); + + if (normalizedAlbumTitle === normalizedBase) return true; + + const shorter = + normalizedAlbumTitle.length < normalizedBase.length + ? normalizedAlbumTitle + : normalizedBase; + const longer = + normalizedAlbumTitle.length >= normalizedBase.length + ? 
normalizedAlbumTitle + : normalizedBase; + if ( + longer.includes(shorter) && + shorter.length >= longer.length * 0.7 + ) { + return true; + } + + return false; + }); + + if (baseMatch && baseMatch.id !== updatedAlbum.id) { + logger.debug( + ` Found base album: "${baseMatch.title}" (ID: ${baseMatch.id})` + ); + logger.debug(` Attempting download of base album instead...`); + + await this.client.put(`/api/v1/album/${baseMatch.id}`, { + ...baseMatch, + monitored: true, + anyReleaseOk: true, + }); + + const baseSearchResponse = await this.client.post( + "/api/v1/command", + { + name: "AlbumSearch", + albumIds: [baseMatch.id], + } + ); + + try { + const baseResult = await this.waitForCommand( + baseSearchResponse.data.id, + 30000 + ); + + if (baseResult.message?.includes("0 reports")) { + logger.warn( + ` Base album "${baseMatch.title}" also has no releases` + ); + throw new Error( + `No releases available for "${albumTitle}" or base album "${baseMatch.title}" - ` + + `check indexer configuration and album availability` + ); + } + + logger.debug(` Base album download started: ${baseMatch.title}`); + return baseMatch; + } catch (baseError: any) { + if (baseError.message?.includes("No releases")) { + throw baseError; + } + if (baseError.message?.includes("timed out")) { + logger.warn( + ` Base album search timed out, may still be searching` + ); + return baseMatch; + } + throw baseError; + } + } else { + logger.debug( + ` No base album match found in artist catalog (${artistAlbums.length} albums)` + ); + } + } + + throw new AcquisitionError( + `No releases available for "${albumTitle}" - indexers found no matching downloads. 
` + + `Album may not be available on configured indexers, or MBID mismatch between Lidarr and indexers.`, + AcquisitionErrorType.NO_RELEASES_AVAILABLE, + true + ); + } + } else { + throw new Error( + "No releases available - indexers found no matching downloads" + ); + } + } + + logger.debug(` Album download started: ${updatedAlbum.title}`); + return updatedAlbum; + } catch (error: any) { + if (error.message?.includes("No releases available")) { + throw error; // Re-throw for startDownload to handle + } + if (error.message?.includes("timed out")) { + // Command timed out - album might still be searching + logger.warn( + ` Search command timed out, album may still be searching` + ); + return updatedAlbum; // Return album, let timeout handling catch it later + } + throw error; + } } catch (error: any) { // Re-throw our own errors (like "No releases available") if (error.message?.includes("No releases available")) { throw error; } - console.error( + logger.error( "Lidarr add album error:", error.response?.data || error.message ); @@ -1010,9 +1474,9 @@ class LidarrService { name: "RescanFolders", }); - console.log("Triggered Lidarr library rescan"); + logger.debug("Triggered Lidarr library rescan"); } catch (error) { - console.error("Lidarr rescan error:", error); + logger.error("Lidarr rescan error:", error); throw error; } } @@ -1028,7 +1492,7 @@ class LidarrService { const response = await this.client.get("/api/v1/artist"); return response.data; } catch (error) { - console.error("Lidarr get artists error:", error); + logger.error("Lidarr get artists error:", error); return []; } } @@ -1044,7 +1508,10 @@ class LidarrService { await this.ensureInitialized(); if (!this.enabled || !this.client) { - return { success: false, message: "Lidarr not enabled or configured" }; + return { + success: false, + message: "Lidarr not enabled or configured", + }; } if (!mbid || mbid.startsWith("temp-")) { @@ -1054,14 +1521,24 @@ class LidarrService { try { // Find artist in Lidarr by 
foreignArtistId (MBID) const artists = await this.getArtists(); - const lidarrArtist = artists.find(a => a.foreignArtistId === mbid); + const lidarrArtist = artists.find( + (a) => a.foreignArtistId === mbid + ); if (!lidarrArtist) { - console.log(`[LIDARR] Artist with MBID ${mbid} not found in Lidarr`); - return { success: true, message: "Artist not in Lidarr (already removed or never added)" }; + logger.debug( + `[LIDARR] Artist with MBID ${mbid} not found in Lidarr` + ); + return { + success: true, + message: + "Artist not in Lidarr (already removed or never added)", + }; } - console.log(`[LIDARR] Deleting artist: ${lidarrArtist.artistName} (ID: ${lidarrArtist.id})`); + logger.debug( + `[LIDARR] Deleting artist: ${lidarrArtist.artistName} (ID: ${lidarrArtist.id})` + ); // Delete the artist from Lidarr (with timeout to prevent hanging) await this.client.delete(`/api/v1/artist/${lidarrArtist.id}`, { @@ -1072,11 +1549,22 @@ class LidarrService { timeout: 30000, // 30 second timeout }); - console.log(`[LIDARR] Successfully deleted artist: ${lidarrArtist.artistName}`); - return { success: true, message: `Deleted ${lidarrArtist.artistName} from Lidarr` }; + logger.debug( + `[LIDARR] Successfully deleted artist: ${lidarrArtist.artistName}` + ); + return { + success: true, + message: `Deleted ${lidarrArtist.artistName} from Lidarr`, + }; } catch (error: any) { - console.error("[LIDARR] Delete artist error:", error?.message || error); - return { success: false, message: error?.message || "Failed to delete from Lidarr" }; + logger.error( + "[LIDARR] Delete artist error:", + error?.message || error + ); + return { + success: false, + message: error?.message || "Failed to delete from Lidarr", + }; } } @@ -1091,36 +1579,48 @@ class LidarrService { await this.ensureInitialized(); if (!this.enabled || !this.client) { - return { success: false, message: "Lidarr not enabled or configured" }; + return { + success: false, + message: "Lidarr not enabled or configured", + }; } try { 
- console.log(`[LIDARR] Deleting album ID: ${lidarrAlbumId}`); + logger.debug(`[LIDARR] Deleting album ID: ${lidarrAlbumId}`); // First get the album to check for track files - const albumResponse = await this.client.get(`/api/v1/album/${lidarrAlbumId}`); + const albumResponse = await this.client.get( + `/api/v1/album/${lidarrAlbumId}` + ); const album = albumResponse.data; const artistId = album.artistId; const albumTitle = album.title || "Unknown"; if (deleteFiles) { // Get track files for this album - const trackFilesResponse = await this.client.get("/api/v1/trackFile", { - params: { albumId: lidarrAlbumId }, - }); - + const trackFilesResponse = await this.client.get( + "/api/v1/trackFile", + { + params: { albumId: lidarrAlbumId }, + } + ); + const trackFiles = trackFilesResponse.data; - + if (trackFiles && trackFiles.length > 0) { // Delete each track file for (const trackFile of trackFiles) { try { - await this.client.delete(`/api/v1/trackFile/${trackFile.id}`); + await this.client.delete( + `/api/v1/trackFile/${trackFile.id}` + ); } catch (e) { // Ignore individual file deletion errors } } - console.log(`[LIDARR] Deleted ${trackFiles.length} track files for album: ${albumTitle}`); + logger.debug( + `[LIDARR] Deleted ${trackFiles.length} track files for album: ${albumTitle}` + ); } } @@ -1130,11 +1630,22 @@ class LidarrService { monitored: false, }); - console.log(`[LIDARR] Successfully unmonitored album: ${albumTitle}`); - return { success: true, message: `Deleted files and unmonitored ${albumTitle}` }; + logger.debug( + `[LIDARR] Successfully unmonitored album: ${albumTitle}` + ); + return { + success: true, + message: `Deleted files and unmonitored ${albumTitle}`, + }; } catch (error: any) { - console.error("[LIDARR] Delete album error:", error?.message || error); - return { success: false, message: error?.message || "Failed to delete album from Lidarr" }; + logger.error( + "[LIDARR] Delete album error:", + error?.message || error + ); + return { + success: 
false, + message: error?.message || "Failed to delete album from Lidarr", + }; } } @@ -1177,7 +1688,7 @@ class LidarrService { if (error.response?.status === 404) { return false; } - console.error("Lidarr album check error:", error.message); + logger.error("Lidarr album check error:", error.message); return false; } } @@ -1186,7 +1697,10 @@ class LidarrService { * Check if an album exists in Lidarr by artist name and album title * Handles MBID mismatches between MusicBrainz and Lidarr */ - async isAlbumAvailableByTitle(artistName: string, albumTitle: string): Promise { + async isAlbumAvailableByTitle( + artistName: string, + albumTitle: string + ): Promise { await this.ensureInitialized(); if (!this.enabled || !this.client) { @@ -1202,9 +1716,10 @@ class LidarrService { const artists = artistsResponse.data || []; // Find matching artist by name - const matchingArtist = artists.find((a: any) => - a.artistName?.toLowerCase().trim() === normalizedArtist || - a.sortName?.toLowerCase().trim() === normalizedArtist + const matchingArtist = artists.find( + (a: any) => + a.artistName?.toLowerCase().trim() === normalizedArtist || + a.sortName?.toLowerCase().trim() === normalizedArtist ); if (!matchingArtist) { @@ -1220,7 +1735,10 @@ class LidarrService { // Check if any album matches the title and has files for (const album of albums) { const albumTitleNorm = album.title?.toLowerCase().trim() || ""; - if (albumTitleNorm === normalizedAlbum || albumTitleNorm.includes(normalizedAlbum)) { + if ( + albumTitleNorm === normalizedAlbum || + albumTitleNorm.includes(normalizedAlbum) + ) { const hasFiles = album.statistics?.percentOfTracks > 0; if (hasFiles) { return true; @@ -1230,7 +1748,7 @@ class LidarrService { return false; } catch (error: any) { - console.error("Lidarr album check by title error:", error.message); + logger.error("Lidarr album check by title error:", error.message); return false; } } @@ -1272,7 +1790,7 @@ class LidarrService { const response = await 
this.client.get("/api/v1/tag"); return response.data || []; } catch (error: any) { - console.error("[LIDARR] Failed to get tags:", error.message); + logger.error("[LIDARR] Failed to get tags:", error.message); return []; } } @@ -1289,10 +1807,12 @@ class LidarrService { try { const response = await this.client.post("/api/v1/tag", { label }); - console.log(`[LIDARR] Created tag: ${label} (ID: ${response.data.id})`); + logger.debug( + `[LIDARR] Created tag: ${label} (ID: ${response.data.id})` + ); return response.data; } catch (error: any) { - console.error("[LIDARR] Failed to create tag:", error.message); + logger.error("[LIDARR] Failed to create tag:", error.message); return null; } } @@ -1302,6 +1822,7 @@ class LidarrService { * Returns the tag ID, caching it for subsequent calls */ private discoveryTagId: number | null = null; + private _indexerCountLogged: boolean = false; async getOrCreateDiscoveryTag(): Promise { await this.ensureInitialized(); @@ -1318,10 +1839,14 @@ class LidarrService { try { // Check if tag already exists const tags = await this.getTags(); - const existingTag = tags.find(t => t.label === DISCOVERY_TAG_LABEL); + const existingTag = tags.find( + (t) => t.label === DISCOVERY_TAG_LABEL + ); if (existingTag) { - console.log(`[LIDARR] Found existing discovery tag (ID: ${existingTag.id})`); + logger.debug( + `[LIDARR] Found existing discovery tag (ID: ${existingTag.id})` + ); this.discoveryTagId = existingTag.id; return existingTag.id; } @@ -1335,7 +1860,10 @@ class LidarrService { return null; } catch (error: any) { - console.error("[LIDARR] Failed to get/create discovery tag:", error.message); + logger.error( + "[LIDARR] Failed to get/create discovery tag:", + error.message + ); return null; } } @@ -1343,7 +1871,10 @@ class LidarrService { /** * Add tags to an artist */ - async addTagsToArtist(artistId: number, tagIds: number[]): Promise { + async addTagsToArtist( + artistId: number, + tagIds: number[] + ): Promise { await 
this.ensureInitialized(); if (!this.enabled || !this.client) { @@ -1352,7 +1883,9 @@ class LidarrService { try { // Get current artist data - const response = await this.client.get(`/api/v1/artist/${artistId}`); + const response = await this.client.get( + `/api/v1/artist/${artistId}` + ); const artist = response.data; // Merge new tags with existing (avoid duplicates) @@ -1365,10 +1898,15 @@ class LidarrService { tags: mergedTags, }); - console.log(`[LIDARR] Added tags ${tagIds} to artist ${artist.artistName}`); + logger.debug( + `[LIDARR] Added tags ${tagIds} to artist ${artist.artistName}` + ); return true; } catch (error: any) { - console.error("[LIDARR] Failed to add tags to artist:", error.message); + logger.error( + "[LIDARR] Failed to add tags to artist:", + error.message + ); return false; } } @@ -1376,7 +1914,10 @@ class LidarrService { /** * Remove tags from an artist */ - async removeTagsFromArtist(artistId: number, tagIds: number[]): Promise { + async removeTagsFromArtist( + artistId: number, + tagIds: number[] + ): Promise { await this.ensureInitialized(); if (!this.enabled || !this.client) { @@ -1385,12 +1926,16 @@ class LidarrService { try { // Get current artist data - const response = await this.client.get(`/api/v1/artist/${artistId}`); + const response = await this.client.get( + `/api/v1/artist/${artistId}` + ); const artist = response.data; // Remove specified tags const existingTags = artist.tags || []; - const filteredTags = existingTags.filter((t: number) => !tagIds.includes(t)); + const filteredTags = existingTags.filter( + (t: number) => !tagIds.includes(t) + ); // Update artist with filtered tags await this.client.put(`/api/v1/artist/${artistId}`, { @@ -1398,10 +1943,15 @@ class LidarrService { tags: filteredTags, }); - console.log(`[LIDARR] Removed tags ${tagIds} from artist ${artist.artistName}`); + logger.debug( + `[LIDARR] Removed tags ${tagIds} from artist ${artist.artistName}` + ); return true; } catch (error: any) { - 
console.error("[LIDARR] Failed to remove tags from artist:", error.message); + logger.error( + "[LIDARR] Failed to remove tags from artist:", + error.message + ); return false; } } @@ -1421,9 +1971,12 @@ class LidarrService { const artists: LidarrArtist[] = response.data; // Filter artists that have the specified tag - return artists.filter(artist => artist.tags?.includes(tagId)); + return artists.filter((artist) => artist.tags?.includes(tagId)); } catch (error: any) { - console.error("[LIDARR] Failed to get artists by tag:", error.message); + logger.error( + "[LIDARR] Failed to get artists by tag:", + error.message + ); return []; } } @@ -1458,22 +2011,31 @@ class LidarrService { // Find artist by MBID const artists = await this.getArtists(); - const artist = artists.find(a => a.foreignArtistId === artistMbid); + const artist = artists.find( + (a) => a.foreignArtistId === artistMbid + ); if (!artist) { - console.log(`[LIDARR] Artist ${artistMbid} not found in Lidarr`); + logger.debug( + `[LIDARR] Artist ${artistMbid} not found in Lidarr` + ); return true; // Not an error - artist might not be in Lidarr } // Check if artist has the discovery tag if (!artist.tags?.includes(tagId)) { - console.log(`[LIDARR] Artist ${artist.artistName} doesn't have discovery tag`); + logger.debug( + `[LIDARR] Artist ${artist.artistName} doesn't have discovery tag` + ); return true; // Already doesn't have tag } return await this.removeTagsFromArtist(artist.id, [tagId]); } catch (error: any) { - console.error("[LIDARR] Failed to remove discovery tag:", error.message); + logger.error( + "[LIDARR] Failed to remove discovery tag:", + error.message + ); return false; } } @@ -1505,7 +2067,7 @@ class LidarrService { if (error.response?.status === 404) { return { success: true, message: "Artist already removed" }; } - console.error("[LIDARR] Delete artist by ID error:", error.message); + logger.error("[LIDARR] Delete artist by ID error:", error.message); return { success: false, message: 
error.message }; } } @@ -1526,14 +2088,18 @@ class LidarrService { } try { - console.log(`[LIDARR] Fetching releases for album ID: ${lidarrAlbumId}`); + logger.debug( + `[LIDARR] Fetching releases for album ID: ${lidarrAlbumId}` + ); const response = await this.client.get("/api/v1/release", { params: { albumId: lidarrAlbumId }, timeout: 60000, // 60s timeout for indexer searches }); const releases: LidarrRelease[] = response.data || []; - console.log(`[LIDARR] Found ${releases.length} releases from indexers`); + logger.debug( + `[LIDARR] Found ${releases.length} releases from indexers` + ); // Sort by preferred criteria (Lidarr already sorts by quality/preferred words) // but we can add seeders as a secondary sort for torrents @@ -1553,7 +2119,7 @@ class LidarrService { return releases; } catch (error: any) { - console.error(`[LIDARR] Failed to fetch releases:`, error.message); + logger.error(`[LIDARR] Failed to fetch releases:`, error.message); return []; } } @@ -1570,20 +2136,25 @@ class LidarrService { } try { - console.log(`[LIDARR] Grabbing release: ${release.title}`); - console.log(` GUID: ${release.guid}`); - console.log(` Indexer: ${release.indexer || 'unknown'}`); - console.log(` Size: ${Math.round((release.size || 0) / 1024 / 1024)} MB`); + logger.debug(`[LIDARR] Grabbing release: ${release.title}`); + logger.debug(` GUID: ${release.guid}`); + logger.debug(` Indexer: ${release.indexer || "unknown"}`); + logger.debug( + ` Size: ${Math.round((release.size || 0) / 1024 / 1024)} MB` + ); await this.client.post("/api/v1/release", { guid: release.guid, indexerId: release.indexerId || 0, }); - console.log(`[LIDARR] Release grabbed successfully`); + logger.debug(`[LIDARR] Release grabbed successfully`); return true; } catch (error: any) { - console.error(`[LIDARR] Failed to grab release:`, error.response?.data || error.message); + logger.error( + `[LIDARR] Failed to grab release:`, + error.response?.data || error.message + ); return false; } } @@ -1610,11 
+2181,15 @@ class LidarrService { ); if (!queueItem) { - console.log(`[LIDARR] Download ${downloadId} not found in queue (may already be removed)`); + logger.debug( + `[LIDARR] Download ${downloadId} not found in queue (may already be removed)` + ); return true; // Consider it success if not in queue } - console.log(`[LIDARR] Blocklisting and removing: ${queueItem.title}`); + logger.debug( + `[LIDARR] Blocklisting and removing: ${queueItem.title}` + ); await this.client.delete(`/api/v1/queue/${queueItem.id}`, { params: { @@ -1624,10 +2199,15 @@ class LidarrService { }, }); - console.log(`[LIDARR] Successfully blocklisted: ${queueItem.title}`); + logger.debug( + `[LIDARR] Successfully blocklisted: ${queueItem.title}` + ); return true; } catch (error: any) { - console.error(`[LIDARR] Failed to blocklist:`, error.response?.data || error.message); + logger.error( + `[LIDARR] Failed to blocklist:`, + error.response?.data || error.message + ); return false; } } @@ -1647,11 +2227,13 @@ class LidarrService { params: { page: 1, pageSize: 100 }, }); - return response.data.records.find( - (item: any) => item.downloadId === downloadId - ) || null; + return ( + response.data.records.find( + (item: any) => item.downloadId === downloadId + ) || null + ); } catch (error: any) { - console.error(`[LIDARR] Failed to find queue item:`, error.message); + logger.error(`[LIDARR] Failed to find queue item:`, error.message); return null; } } @@ -1660,44 +2242,54 @@ class LidarrService { * Get upcoming and recent releases from Lidarr calendar * Returns albums releasing within the specified date range for monitored artists */ - async getCalendar(startDate: Date, endDate: Date): Promise { + async getCalendar( + startDate: Date, + endDate: Date + ): Promise { await this.ensureInitialized(); - + if (!this.client) { - console.log("[LIDARR] Not configured - cannot fetch calendar"); + logger.debug("[LIDARR] Not configured - cannot fetch calendar"); return []; } try { - const start = 
startDate.toISOString().split('T')[0]; - const end = endDate.toISOString().split('T')[0]; - + const start = startDate.toISOString().split("T")[0]; + const end = endDate.toISOString().split("T")[0]; + const response = await this.client.get(`/api/v1/calendar`, { params: { start, end, includeArtist: true, - } + }, }); - const releases: CalendarRelease[] = response.data.map((album: any) => ({ - id: album.id, - title: album.title, - artistName: album.artist?.artistName || 'Unknown Artist', - artistId: album.artist?.id, - artistMbid: album.artist?.foreignArtistId, - albumMbid: album.foreignAlbumId, - releaseDate: album.releaseDate, - monitored: album.monitored, - grabbed: album.grabbed || false, - hasFile: album.statistics?.percentOfTracks === 100, - coverUrl: album.images?.find((img: any) => img.coverType === 'cover')?.remoteUrl || null, - })); + const releases: CalendarRelease[] = response.data.map( + (album: any) => ({ + id: album.id, + title: album.title, + artistName: album.artist?.artistName || "Unknown Artist", + artistId: album.artist?.id, + artistMbid: album.artist?.foreignArtistId, + albumMbid: album.foreignAlbumId, + releaseDate: album.releaseDate, + monitored: album.monitored, + grabbed: album.grabbed || false, + hasFile: album.statistics?.percentOfTracks === 100, + coverUrl: + album.images?.find( + (img: any) => img.coverType === "cover" + )?.remoteUrl || null, + }) + ); - console.log(`[LIDARR] Calendar: Found ${releases.length} releases between ${start} and ${end}`); + logger.debug( + `[LIDARR] Calendar: Found ${releases.length} releases between ${start} and ${end}` + ); return releases; } catch (error: any) { - console.error(`[LIDARR] Failed to fetch calendar:`, error.message); + logger.error(`[LIDARR] Failed to fetch calendar:`, error.message); return []; } } @@ -1705,9 +2297,11 @@ class LidarrService { /** * Get all monitored artists from Lidarr */ - async getMonitoredArtists(): Promise<{ id: number; name: string; mbid: string }[]> { + async 
getMonitoredArtists(): Promise< + { id: number; name: string; mbid: string }[] + > { await this.ensureInitialized(); - + if (!this.client) { return []; } @@ -1722,7 +2316,10 @@ class LidarrService { mbid: artist.foreignArtistId, })); } catch (error: any) { - console.error(`[LIDARR] Failed to fetch monitored artists:`, error.message); + logger.error( + `[LIDARR] Failed to fetch monitored artists:`, + error.message + ); return []; } } @@ -1776,6 +2373,8 @@ interface QueueItem { trackedDownloadStatus: string; trackedDownloadState: string; statusMessages: { title: string; messages: string[] }[]; + sizeleft?: number; + size?: number; } interface QueueResponse { @@ -1818,7 +2417,6 @@ const FAILED_IMPORT_PATTERNS = [ "No files found are eligible for import", "Not an upgrade for existing", "Not a Custom Format upgrade", - "Has missing tracks", // Individual tracks from discography packs "missing tracks", "Album match is not close enough", // Lidarr matching threshold failure "Artist name mismatch", // Manual import required - artist doesn't match @@ -1864,7 +2462,7 @@ export async function cleanStuckDownloads( } ); - console.log( + logger.debug( ` Queue cleaner: checking ${response.data.records.length} items` ); @@ -1874,12 +2472,12 @@ export async function cleanStuckDownloads( item.statusMessages?.flatMap((sm) => sm.messages) || []; // Log ALL items to understand what states we're seeing - console.log(` - ${item.title}`); - console.log( + logger.debug(` - ${item.title}`); + logger.debug( ` Status: ${item.status}, TrackedStatus: ${item.trackedDownloadStatus}, State: ${item.trackedDownloadState}` ); if (allMessages.length > 0) { - console.log(` Messages: ${allMessages.join("; ")}`); + logger.debug(` Messages: ${allMessages.join("; ")}`); } // Check for pattern matches in messages @@ -1899,7 +2497,8 @@ export async function cleanStuckDownloads( // Don't wait for timeout, clean up immediately const isImportFailed = item.trackedDownloadState === "importFailed"; - const 
shouldRemove = hasFailedPattern || isStuckWarning || isImportFailed; + const shouldRemove = + hasFailedPattern || isStuckWarning || isImportFailed; if (shouldRemove) { const reason = isImportFailed @@ -1907,7 +2506,7 @@ export async function cleanStuckDownloads( : hasFailedPattern ? "failed pattern match" : "stuck warning state"; - console.log(` [REMOVE] Removing ${item.title} (${reason})`); + logger.debug(` [REMOVE] Removing ${item.title} (${reason})`); try { // Remove from queue, blocklist the release, trigger new search @@ -1921,11 +2520,11 @@ export async function cleanStuckDownloads( }); removed.push(item.title); - console.log(` Removed and blocklisted: ${item.title}`); + logger.debug(` Removed and blocklisted: ${item.title}`); } catch (deleteError: any) { // Item might already be gone - that's fine if (deleteError.response?.status !== 404) { - console.error( + logger.error( ` Failed to remove ${item.title}:`, deleteError.message ); @@ -1935,14 +2534,14 @@ export async function cleanStuckDownloads( } if (removed.length > 0) { - console.log( + logger.debug( ` Queue cleaner: removed ${removed.length} stuck item(s)` ); } return { removed: removed.length, items: removed }; } catch (error: any) { - console.error("Queue clean failed:", error.message); + logger.error("Queue clean failed:", error.message); throw error; } } @@ -1977,7 +2576,7 @@ export async function getRecentCompletedDownloads( return new Date(record.date) >= cutoff; }); } catch (error: any) { - console.error("Failed to fetch Lidarr history:", error.message); + logger.error("Failed to fetch Lidarr history:", error.message); throw error; } } @@ -2002,11 +2601,45 @@ export async function getQueueCount( ); return response.data.totalRecords; } catch (error: any) { - console.error("Failed to get queue count:", error.message); + logger.error("Failed to get queue count:", error.message); return 0; } } +/** + * Get the full Lidarr queue + * Returns all items currently in the download queue + */ +export async 
function getQueue(): Promise { + const settings = await getSystemSettings(); + if ( + !settings?.lidarrEnabled || + !settings.lidarrUrl || + !settings.lidarrApiKey + ) { + return []; + } + + try { + const response = await axios.get( + `${settings.lidarrUrl}/api/v1/queue`, + { + params: { + page: 1, + pageSize: 100, + includeUnknownArtistItems: true, + }, + headers: { "X-Api-Key": settings.lidarrApiKey }, + } + ); + + return response.data.records || []; + } catch (error: any) { + logger.error("Failed to get Lidarr queue:", error.message); + return []; + } +} + /** * Check if a specific download is still actively downloading in Lidarr's queue * Returns true if actively downloading, false if not found or stuck @@ -2015,7 +2648,11 @@ export async function isDownloadActive( downloadId: string ): Promise<{ active: boolean; status?: string; progress?: number }> { const settings = await getSystemSettings(); - if (!settings?.lidarrEnabled || !settings.lidarrUrl || !settings.lidarrApiKey) { + if ( + !settings?.lidarrEnabled || + !settings.lidarrUrl || + !settings.lidarrApiKey + ) { return { active: false }; } @@ -2032,26 +2669,30 @@ export async function isDownloadActive( } ); - const item = response.data.records.find(r => r.downloadId === downloadId); - + const item = response.data.records.find( + (r) => r.downloadId === downloadId + ); + if (!item) { return { active: false, status: "not_found" }; } // Check if it's actively downloading (not stuck in warning/failed state) - const isActivelyDownloading = - item.status === "downloading" || - (item.trackedDownloadState === "downloading" && item.trackedDownloadStatus !== "warning"); + const isActivelyDownloading = + item.status === "downloading" || + (item.trackedDownloadState === "downloading" && + item.trackedDownloadStatus !== "warning"); return { active: isActivelyDownloading, status: item.trackedDownloadState || item.status, - progress: item.sizeleft && item.size - ? 
Math.round((1 - item.sizeleft / item.size) * 100) - : undefined + progress: + item.sizeleft && item.size + ? Math.round((1 - item.sizeleft / item.size) * 100) + : undefined, }; } catch (error: any) { - console.error("Failed to check download status:", error.message); + logger.error("Failed to check download status:", error.message); return { active: false }; } } diff --git a/backend/src/services/moodBucketService.ts b/backend/src/services/moodBucketService.ts index c21f1d9..8d49b0a 100644 --- a/backend/src/services/moodBucketService.ts +++ b/backend/src/services/moodBucketService.ts @@ -6,6 +6,7 @@ * instant mood mix generation through simple database lookups. */ +import { logger } from "../utils/logger"; import { prisma } from "../utils/db"; // Mood configuration with scoring rules @@ -16,6 +17,7 @@ export const MOOD_CONFIG = { name: "Happy & Upbeat", color: "from-yellow-400 to-orange-500", icon: "Smile", + moodTagKeywords: ["happy", "upbeat", "cheerful", "joyful", "positive"], // Primary: ML mood prediction primary: { moodHappy: { min: 0.5 }, moodSad: { max: 0.4 } }, // Fallback: basic audio features @@ -25,6 +27,7 @@ export const MOOD_CONFIG = { name: "Melancholic", color: "from-blue-600 to-indigo-700", icon: "CloudRain", + moodTagKeywords: ["sad", "melancholic", "melancholy", "dark", "somber"], primary: { moodSad: { min: 0.5 }, moodHappy: { max: 0.4 } }, fallback: { valence: { max: 0.35 }, keyScale: "minor" }, }, @@ -32,6 +35,7 @@ export const MOOD_CONFIG = { name: "Chill & Relaxed", color: "from-teal-400 to-cyan-500", icon: "Wind", + moodTagKeywords: ["relaxed", "chill", "calm", "mellow"], primary: { moodRelaxed: { min: 0.5 }, moodAggressive: { max: 0.3 } }, fallback: { energy: { max: 0.5 }, arousal: { max: 0.5 } }, }, @@ -39,6 +43,7 @@ export const MOOD_CONFIG = { name: "High Energy", color: "from-red-500 to-orange-600", icon: "Zap", + moodTagKeywords: ["energetic", "powerful", "exciting"], primary: { arousal: { min: 0.6 }, energy: { min: 0.7 } }, fallback: { 
bpm: { min: 120 }, energy: { min: 0.7 } }, }, @@ -46,6 +51,7 @@ export const MOOD_CONFIG = { name: "Dance Party", color: "from-pink-500 to-rose-600", icon: "PartyPopper", + moodTagKeywords: ["party", "danceable", "groovy"], primary: { moodParty: { min: 0.5 }, danceability: { min: 0.6 } }, fallback: { danceability: { min: 0.7 }, energy: { min: 0.6 } }, }, @@ -53,6 +59,7 @@ export const MOOD_CONFIG = { name: "Focus Mode", color: "from-purple-600 to-violet-700", icon: "Brain", + moodTagKeywords: ["instrumental"], primary: { instrumentalness: { min: 0.5 }, moodRelaxed: { min: 0.3 } }, fallback: { instrumentalness: { min: 0.5 }, @@ -63,6 +70,7 @@ export const MOOD_CONFIG = { name: "Deep Feels", color: "from-gray-700 to-slate-800", icon: "Moon", + moodTagKeywords: ["sad", "melancholic", "emotional", "dark"], primary: { moodSad: { min: 0.4 }, valence: { max: 0.4 } }, fallback: { valence: { max: 0.35 }, keyScale: "minor" }, }, @@ -70,6 +78,7 @@ export const MOOD_CONFIG = { name: "Intense", color: "from-red-700 to-gray-900", icon: "Flame", + moodTagKeywords: ["aggressive", "angry"], primary: { moodAggressive: { min: 0.5 } }, fallback: { energy: { min: 0.8 }, arousal: { min: 0.7 } }, }, @@ -77,6 +86,7 @@ export const MOOD_CONFIG = { name: "Acoustic Vibes", color: "from-amber-500 to-yellow-600", icon: "Guitar", + moodTagKeywords: ["acoustic"], primary: { moodAcoustic: { min: 0.5 }, moodElectronic: { max: 0.4 } }, fallback: { acousticness: { min: 0.6 }, @@ -123,6 +133,7 @@ interface TrackWithAnalysis { instrumentalness: number | null; bpm: number | null; keyScale: string | null; + moodTags: string[]; } export class MoodBucketService { @@ -153,11 +164,12 @@ export class MoodBucketService { instrumentalness: true, bpm: true, keyScale: true, + moodTags: true, }, }); if (!track || track.analysisStatus !== "completed") { - console.log( + logger.debug( `[MoodBucket] Track ${trackId} not analyzed yet, skipping` ); return []; @@ -199,7 +211,7 @@ export class MoodBucketService { 
.filter(([_, score]) => score > 0) .map(([mood]) => mood); - console.log( + logger.debug( `[MoodBucket] Track ${trackId} assigned to moods: ${ assignedMoods.join(", ") || "none" }` @@ -226,6 +238,16 @@ export class MoodBucketService { acoustic: 0, }; + // Check if we have individual mood fields OR moodTags + const hasIndividualMoods = track.moodHappy !== null || track.moodSad !== null; + const hasMoodTags = track.moodTags && track.moodTags.length > 0; + + // If we have moodTags but no individual mood fields, parse moodTags + if (!hasIndividualMoods && hasMoodTags) { + return this.calculateMoodScoresFromTags(track.moodTags); + } + + // Otherwise use original logic for (const [mood, config] of Object.entries(MOOD_CONFIG)) { const rules = isEnhanced ? config.primary : config.fallback; const score = this.evaluateMoodRules(track, rules); @@ -235,6 +257,43 @@ export class MoodBucketService { return scores; } + /** + * Calculate mood scores from moodTags array + * Used when individual mood fields are not populated + */ + private calculateMoodScoresFromTags(moodTags: string[]): Record { + const scores: Record = { + happy: 0, + sad: 0, + chill: 0, + energetic: 0, + party: 0, + focus: 0, + melancholy: 0, + aggressive: 0, + acoustic: 0, + }; + + const normalizedTags = moodTags.map(tag => tag.toLowerCase()); + + for (const [mood, config] of Object.entries(MOOD_CONFIG)) { + const keywords = config.moodTagKeywords; + let matchCount = 0; + + for (const keyword of keywords) { + if (normalizedTags.includes(keyword)) { + matchCount++; + } + } + + if (matchCount > 0) { + scores[mood as MoodType] = Math.min(1.0, 0.3 + (matchCount - 1) * 0.2); + } + } + + return scores; + } + /** * Evaluate mood rules against track features * Returns a score 0-1 based on how well the track matches the rules @@ -380,7 +439,7 @@ export class MoodBucketService { }); if (moodBuckets.length < 8) { - console.log( + logger.debug( `[MoodBucket] Not enough tracks for mood ${mood}: ${moodBuckets.length}` ); 
return null; @@ -465,7 +524,7 @@ export class MoodBucketService { }, }); - console.log( + logger.debug( `[MoodBucket] Saved ${mood} mix for user ${userId} (${mix.trackCount} tracks)` ); @@ -532,7 +591,7 @@ export class MoodBucketService { let assigned = 0; let skip = 0; - console.log("[MoodBucket] Starting backfill of all analyzed tracks..."); + logger.debug("[MoodBucket] Starting backfill of all analyzed tracks..."); while (true) { const tracks = await prisma.track.findMany({ @@ -555,6 +614,7 @@ export class MoodBucketService { instrumentalness: true, bpm: true, keyScale: true, + moodTags: true, }, skip, take: batchSize, @@ -601,12 +661,12 @@ export class MoodBucketService { } skip += batchSize; - console.log( + logger.debug( `[MoodBucket] Backfill progress: ${processed} tracks processed, ${assigned} mood assignments` ); } - console.log( + logger.debug( `[MoodBucket] Backfill complete: ${processed} tracks processed, ${assigned} mood assignments` ); return { processed, assigned }; diff --git a/backend/src/services/musicScanner.ts b/backend/src/services/musicScanner.ts index d40f272..de7d842 100644 --- a/backend/src/services/musicScanner.ts +++ b/backend/src/services/musicScanner.ts @@ -1,11 +1,18 @@ import * as fs from "fs"; +import { logger } from "../utils/logger"; import * as path from "path"; import { parseFile } from "music-metadata"; import { prisma } from "../utils/db"; import PQueue from "p-queue"; import { CoverArtExtractor } from "./coverArtExtractor"; import { deezerService } from "./deezer"; -import { normalizeArtistName, areArtistNamesSimilar, canonicalizeVariousArtists } from "../utils/artistNormalization"; +import { + normalizeArtistName, + areArtistNamesSimilar, + canonicalizeVariousArtists, + extractPrimaryArtist, + parseArtistFromPath, +} from "../utils/artistNormalization"; // Supported audio formats const AUDIO_EXTENSIONS = new Set([ @@ -64,11 +71,11 @@ export class MusicScannerService { duration: 0, }; - console.log(`Starting library scan: 
${musicPath}`); + logger.debug(`Starting library scan: ${musicPath}`); // Step 1: Find all audio files const audioFiles = await this.findAudioFiles(musicPath); - console.log(`Found ${audioFiles.length} audio files`); + logger.debug(`Found ${audioFiles.length} audio files`); // Step 2: Get existing tracks from database const existingTracks = await prisma.track.findMany({ @@ -135,7 +142,7 @@ export class MusicScannerService { }; result.errors.push(error); progress.errors.push(error); - console.error(`Error processing ${audioFile}:`, err); + logger.error(`Error processing ${audioFile}:`, err); } finally { filesScanned++; progress.filesScanned = filesScanned; @@ -161,7 +168,7 @@ export class MusicScannerService { }, }); result.tracksRemoved = tracksToRemove.length; - console.log(`Removed ${tracksToRemove.length} missing tracks`); + logger.debug(`Removed ${tracksToRemove.length} missing tracks`); } // Step 5: Clean up orphaned albums (albums with no tracks) @@ -173,7 +180,7 @@ export class MusicScannerService { }); if (orphanedAlbums.length > 0) { - console.log(`Removing ${orphanedAlbums.length} orphaned albums...`); + logger.debug(`Removing ${orphanedAlbums.length} orphaned albums...`); await prisma.album.deleteMany({ where: { id: { in: orphanedAlbums.map((a) => a.id) }, @@ -190,7 +197,13 @@ export class MusicScannerService { }); if (orphanedArtists.length > 0) { - console.log(`Removing ${orphanedArtists.length} orphaned artists: ${orphanedArtists.map(a => a.name).join(', ')}`); + logger.debug( + `Removing ${ + orphanedArtists.length + } orphaned artists: ${orphanedArtists + .map((a) => a.name) + .join(", ")}` + ); await prisma.artist.deleteMany({ where: { id: { in: orphanedArtists.map((a) => a.id) }, @@ -199,79 +212,13 @@ export class MusicScannerService { } result.duration = Date.now() - startTime; - console.log( + logger.debug( `Scan complete: +${result.tracksAdded} ~${result.tracksUpdated} -${result.tracksRemoved} (${result.duration}ms)` ); return result; } - /** - 
* Extract the primary artist from collaboration strings - * Examples: - * "CHVRCHES & Robert Smith" -> "CHVRCHES" - * "Artist feat. Someone" -> "Artist" - * "Artist ft. Someone" -> "Artist" - * "Artist, Someone" -> "Artist" - * - * But preserves band names: - * "Earth, Wind & Fire" -> "Earth, Wind & Fire" (kept as-is) - * "The Naked and Famous" -> "The Naked and Famous" (kept as-is) - */ - private extractPrimaryArtist(artistName: string): string { - // Trim whitespace - artistName = artistName.trim(); - - // HIGH PRIORITY: These patterns almost always indicate collaborations - // (not band names) so we always split on them - const definiteCollaborationPatterns = [ - / feat\.? /i, // "feat." or "feat " - / ft\.? /i, // "ft." or "ft " - / featuring /i, - ]; - - for (const pattern of definiteCollaborationPatterns) { - const match = artistName.split(pattern); - if (match.length > 1) { - return match[0].trim(); - } - } - - // LOWER PRIORITY: These might be band names, so only split if the result - // looks like a complete artist name (not truncated) - const ambiguousPatterns = [ - { pattern: / \& /, name: "&" }, // "Earth, Wind & Fire" shouldn't split - { pattern: / and /i, name: "and" }, // "The Naked and Famous" shouldn't split - { pattern: / with /i, name: "with" }, - { pattern: /, /, name: "," }, - ]; - - for (const { pattern } of ambiguousPatterns) { - const parts = artistName.split(pattern); - if (parts.length > 1) { - const firstPart = parts[0].trim(); - const lastWord = firstPart.split(/\s+/).pop()?.toLowerCase() || ""; - - // Don't split if the first part ends with common incomplete words - // These suggest it's a band name, not a collaboration - const incompleteEndings = ["the", "a", "an", "and", "of", ","]; - if (incompleteEndings.includes(lastWord)) { - continue; // Skip this pattern, try the next one - } - - // Don't split if the first part is very short (likely incomplete) - if (firstPart.length < 4) { - continue; - } - - return firstPart; - } - } - - // 
No collaboration found, return as-is - return artistName; - } - /** * Check if a file path is within the discovery folder * Discovery albums are stored in paths like "discovery/Artist/Album/track.flac" @@ -294,12 +241,13 @@ export class MusicScannerService { return str .toLowerCase() .trim() - .normalize('NFD').replace(/[\u0300-\u036f]/g, '') // Remove diacritics (café → cafe) - .replace(/[''´`]/g, "'") // Normalize apostrophes - .replace(/[""„]/g, '"') // Normalize quotes - .replace(/[–—−]/g, '-') // Normalize dashes - .replace(/\s+/g, ' ') // Collapse whitespace - .replace(/[^\w\s'"-]/g, ''); // Remove other special chars + .normalize("NFD") + .replace(/[\u0300-\u036f]/g, "") // Remove diacritics (café → cafe) + .replace(/[''´`]/g, "'") // Normalize apostrophes + .replace(/[""„]/g, '"') // Normalize quotes + .replace(/[–—−]/g, "-") // Normalize dashes + .replace(/\s+/g, " ") // Collapse whitespace + .replace(/[^\w\s'"-]/g, ""); // Remove other special chars } /** @@ -314,16 +262,23 @@ export class MusicScannerService { const normalizedArtist = this.normalizeForMatching(artistName); const normalizedAlbum = this.normalizeForMatching(albumTitle); - - // Also try with primary artist extracted (handles "Artist A feat. Artist B") - const primaryArtist = this.extractPrimaryArtist(artistName); - const normalizedPrimaryArtist = this.normalizeForMatching(primaryArtist); - console.log(`[Scanner] Checking discovery: "${artistName}" → "${normalizedArtist}"`); + // Also try with primary artist extracted (handles "Artist A feat. 
Artist B") + const primaryArtist = extractPrimaryArtist(artistName); + const normalizedPrimaryArtist = + this.normalizeForMatching(primaryArtist); + + logger.debug( + `[Scanner] Checking discovery: "${artistName}" -> "${normalizedArtist}"` + ); if (primaryArtist !== artistName) { - console.log(`[Scanner] Primary artist: "${primaryArtist}" → "${normalizedPrimaryArtist}"`); + logger.debug( + `[Scanner] Primary artist: "${primaryArtist}" -> "${normalizedPrimaryArtist}"` + ); } - console.log(`[Scanner] Album: "${albumTitle}" → "${normalizedAlbum}"`); + logger.debug( + `[Scanner] Album: "${albumTitle}" -> "${normalizedAlbum}"` + ); try { // Get all discovery jobs (pending, processing, or recently completed) @@ -334,16 +289,26 @@ export class MusicScannerService { }, }); - console.log(`[Scanner] Found ${discoveryJobs.length} discovery jobs to check`); + logger.debug( + `[Scanner] Found ${discoveryJobs.length} discovery jobs to check` + ); // Pass 1: Exact match after normalization for (const job of discoveryJobs) { const metadata = job.metadata as any; - const jobArtist = this.normalizeForMatching(metadata?.artistName || ""); - const jobAlbum = this.normalizeForMatching(metadata?.albumTitle || ""); + const jobArtist = this.normalizeForMatching( + metadata?.artistName || "" + ); + const jobAlbum = this.normalizeForMatching( + metadata?.albumTitle || "" + ); - if ((jobArtist === normalizedArtist || jobArtist === normalizedPrimaryArtist) && jobAlbum === normalizedAlbum) { - console.log(`[Scanner] EXACT MATCH: job ${job.id}`); + if ( + (jobArtist === normalizedArtist || + jobArtist === normalizedPrimaryArtist) && + jobAlbum === normalizedAlbum + ) { + logger.debug(`[Scanner] EXACT MATCH: job ${job.id}`); return true; } } @@ -351,23 +316,31 @@ export class MusicScannerService { // Pass 2: Partial match fallback (handles "Album" vs "Album (Deluxe)") for (const job of discoveryJobs) { const metadata = job.metadata as any; - const jobArtist = 
this.normalizeForMatching(metadata?.artistName || ""); - const jobAlbum = this.normalizeForMatching(metadata?.albumTitle || ""); + const jobArtist = this.normalizeForMatching( + metadata?.artistName || "" + ); + const jobAlbum = this.normalizeForMatching( + metadata?.albumTitle || "" + ); // Try matching both full artist name and extracted primary artist - const artistMatch = jobArtist === normalizedArtist || - jobArtist === normalizedPrimaryArtist || - normalizedArtist.includes(jobArtist) || - jobArtist.includes(normalizedArtist) || - normalizedPrimaryArtist.includes(jobArtist) || - jobArtist.includes(normalizedPrimaryArtist); - const albumMatch = jobAlbum === normalizedAlbum || - normalizedAlbum.includes(jobAlbum) || - jobAlbum.includes(normalizedAlbum); + const artistMatch = + jobArtist === normalizedArtist || + jobArtist === normalizedPrimaryArtist || + normalizedArtist.includes(jobArtist) || + jobArtist.includes(normalizedArtist) || + normalizedPrimaryArtist.includes(jobArtist) || + jobArtist.includes(normalizedPrimaryArtist); + const albumMatch = + jobAlbum === normalizedAlbum || + normalizedAlbum.includes(jobAlbum) || + jobAlbum.includes(normalizedAlbum); if (artistMatch && albumMatch) { - console.log(`[Scanner] PARTIAL MATCH: job ${job.id}`); - console.log(`[Scanner] Job: "${jobArtist}" - "${jobAlbum}"`); + logger.debug(`[Scanner] PARTIAL MATCH: job ${job.id}`); + logger.debug( + `[Scanner] Job: "${jobArtist}" - "${jobAlbum}"` + ); return true; } } @@ -376,59 +349,79 @@ export class MusicScannerService { // If the album title matches exactly, this track is likely a featured artist on a discovery album for (const job of discoveryJobs) { const metadata = job.metadata as any; - const jobAlbum = this.normalizeForMatching(metadata?.albumTitle || ""); + const jobAlbum = this.normalizeForMatching( + metadata?.albumTitle || "" + ); - if (jobAlbum === normalizedAlbum && normalizedAlbum.length > 3) { - console.log(`[Scanner] ALBUM-ONLY MATCH (featured artist): job 
${job.id}`); - console.log(`[Scanner] Track artist "${normalizedArtist}" is likely featured on "${jobAlbum}"`); + if ( + jobAlbum === normalizedAlbum && + normalizedAlbum.length > 3 + ) { + logger.debug( + `[Scanner] ALBUM-ONLY MATCH (featured artist): job ${job.id}` + ); + logger.debug( + `[Scanner] Track artist "${normalizedArtist}" is likely featured on "${jobAlbum}"` + ); return true; } } // Pass 4: Check DiscoveryAlbum table (for already processed albums) by album title - const discoveryAlbumByTitle = await prisma.discoveryAlbum.findFirst({ - where: { - albumTitle: { equals: albumTitle, mode: "insensitive" }, - status: { in: ["ACTIVE", "LIKED"] }, - }, - }); + const discoveryAlbumByTitle = await prisma.discoveryAlbum.findFirst( + { + where: { + albumTitle: { equals: albumTitle, mode: "insensitive" }, + status: { in: ["ACTIVE", "LIKED"] }, + }, + } + ); if (discoveryAlbumByTitle) { - console.log(`[Scanner] DiscoveryAlbum match (by title): ${discoveryAlbumByTitle.id}`); + logger.debug( + `[Scanner] DiscoveryAlbum match (by title): ${discoveryAlbumByTitle.id}` + ); return true; } - + // Pass 5: Check if artist name matches any discovery album // This catches cases where Lidarr downloads a different album than requested // e.g., requested "Broods - Broods" but got "Broods - Evergreen" - const discoveryAlbumByArtist = await prisma.discoveryAlbum.findFirst({ - where: { - artistName: { equals: artistName, mode: "insensitive" }, - status: { in: ["ACTIVE", "LIKED", "DELETED"] }, // Include DELETED to catch cleanup scenarios - }, - }); + const discoveryAlbumByArtist = + await prisma.discoveryAlbum.findFirst({ + where: { + artistName: { equals: artistName, mode: "insensitive" }, + status: { in: ["ACTIVE", "LIKED", "DELETED"] }, // Include DELETED to catch cleanup scenarios + }, + }); if (discoveryAlbumByArtist) { // Double-check: only match if this artist has NO library albums yet // This prevents marking albums from artists that exist in both library and discovery const 
existingLibraryAlbum = await prisma.album.findFirst({ where: { - artist: { name: { equals: artistName, mode: "insensitive" } }, + artist: { + name: { equals: artistName, mode: "insensitive" }, + }, location: "LIBRARY", }, }); - + if (!existingLibraryAlbum) { - console.log(`[Scanner] DiscoveryAlbum match (by artist): ${discoveryAlbumByArtist.id}`); - console.log(`[Scanner] Artist "${artistName}" is a discovery-only artist`); + logger.debug( + `[Scanner] DiscoveryAlbum match (by artist): ${discoveryAlbumByArtist.id}` + ); + logger.debug( + `[Scanner] Artist "${artistName}" is a discovery-only artist` + ); return true; } } - console.log(`[Scanner] No discovery match found`); + logger.debug(`[Scanner] No discovery match found`); return false; } catch (error) { - console.error(`[Scanner] Error checking discovery status:`, error); + logger.error(`[Scanner] Error checking discovery status:`, error); return false; } } @@ -489,17 +482,36 @@ export class MusicScannerService { let rawArtistName = metadata.common.albumartist || metadata.common.artist || - "Unknown Artist"; + ""; + + // Folder fallback: If metadata is empty, try to parse from folder structure + if (!rawArtistName || rawArtistName.trim() === "") { + const folderPath = path.dirname(relativePath); + const folderName = path.basename(folderPath); + const parsedArtist = parseArtistFromPath(folderName); + + if (parsedArtist) { + logger.debug( + `[Scanner] No metadata artist found, using folder: "${folderName}" -> "${parsedArtist}"` + ); + rawArtistName = parsedArtist; + } else { + rawArtistName = "Unknown Artist"; + logger.warn( + `[Scanner] Unknown Artist assigned for: ${relativePath} (no metadata, folder parse failed: "${folderName}")` + ); + } + } const albumTitle = metadata.common.album || "Unknown Album"; const year = metadata.common.year || null; // ALWAYS extract primary artist first - this handles both: - // - Featured artists: "Artist A feat. Artist B" -> "Artist A" + // - Featured artists: "Artist A feat. 
Artist B" -> "Artist A" // - Collaborations: "Artist A & Artist B" -> "Artist A" // Band names like "Of Mice & Men" are preserved because extractPrimaryArtist // only splits on " feat.", " ft.", " featuring ", " & ", etc. (with spaces) - const extractedPrimaryArtist = this.extractPrimaryArtist(rawArtistName); + const extractedPrimaryArtist = extractPrimaryArtist(rawArtistName); let artistName = extractedPrimaryArtist; // Canonicalize Various Artists variations (VA, V.A., , etc.) @@ -511,7 +523,7 @@ export class MusicScannerService { let artist = await prisma.artist.findFirst({ where: { normalizedName: normalizedPrimaryName }, }); - + // If no match with primary name and we actually extracted something, // also try the full raw name (for bands like "Of Mice & Men") if (!artist && extractedPrimaryArtist !== rawArtistName) { @@ -531,11 +543,15 @@ export class MusicScannerService { // If we found an artist, optionally update to better capitalization if (artist && artist.name !== artistName) { // Check if the new name has better capitalization (starts with uppercase) - const currentNameIsLowercase = artist.name[0] === artist.name[0].toLowerCase(); - const newNameIsCapitalized = artistName[0] === artistName[0].toUpperCase(); + const currentNameIsLowercase = + artist.name[0] === artist.name[0].toLowerCase(); + const newNameIsCapitalized = + artistName[0] === artistName[0].toUpperCase(); if (currentNameIsLowercase && newNameIsCapitalized) { - console.log(`Updating artist name capitalization: "${artist.name}" -> "${artistName}"`); + logger.debug( + `Updating artist name capitalization: "${artist.name}" -> "${artistName}"` + ); artist = await prisma.artist.update({ where: { id: artist.id }, data: { name: artistName }, @@ -550,17 +566,27 @@ export class MusicScannerService { where: { normalizedName: { // Get artists whose normalized names start with similar prefix - startsWith: normalizedArtistName.substring(0, Math.min(3, normalizedArtistName.length)), + startsWith: 
normalizedArtistName.substring( + 0, + Math.min(3, normalizedArtistName.length) + ), }, }, - select: { id: true, name: true, normalizedName: true, mbid: true }, + select: { + id: true, + name: true, + normalizedName: true, + mbid: true, + }, }); // Check for fuzzy matches for (const candidate of similarArtists) { if (areArtistNamesSimilar(artistName, candidate.name, 95)) { - console.log(`Fuzzy match found: "${artistName}" -> "${candidate.name}"`); - artist = candidate; + logger.debug( + `Fuzzy match found: "${artistName}" -> "${candidate.name}"` + ); + artist = candidate as any; break; } } @@ -579,13 +605,15 @@ export class MusicScannerService { const tempArtist = await prisma.artist.findFirst({ where: { normalizedName: normalizedArtistName, - mbid: { startsWith: 'temp-' }, + mbid: { startsWith: "temp-" }, }, }); if (tempArtist) { // Consolidate: update temp artist to real MBID - console.log(`[SCANNER] Consolidating temp artist "${tempArtist.name}" with real MBID: ${artistMbid}`); + logger.debug( + `[SCANNER] Consolidating temp artist "${tempArtist.name}" with real MBID: ${artistMbid}` + ); artist = await prisma.artist.update({ where: { id: tempArtist.id }, data: { mbid: artistMbid }, @@ -635,8 +663,11 @@ export class MusicScannerService { // 2. Check if artist+album matches a discovery download job // 3. 
Check if artist is a discovery-only artist (has DISCOVER albums but no LIBRARY albums) const isDiscoveryByPath = this.isDiscoveryPath(relativePath); - const isDiscoveryByJob = await this.isDiscoveryDownload(artistName, albumTitle); - + const isDiscoveryByJob = await this.isDiscoveryDownload( + artistName, + albumTitle + ); + // Check if this artist is discovery-only (has no LIBRARY albums) // If so, any new albums from them should also be DISCOVER let isDiscoveryArtist = false; @@ -645,18 +676,23 @@ export class MusicScannerService { where: { artistId: artist.id }, select: { location: true }, }); - + // Artist is discovery-only if they have albums but NONE are LIBRARY if (artistAlbums.length > 0) { - const hasLibraryAlbums = artistAlbums.some(a => a.location === "LIBRARY"); + const hasLibraryAlbums = artistAlbums.some( + (a) => a.location === "LIBRARY" + ); isDiscoveryArtist = !hasLibraryAlbums; if (isDiscoveryArtist) { - console.log(`[Scanner] Discovery-only artist detected: ${artistName}`); + logger.debug( + `[Scanner] Discovery-only artist detected: ${artistName}` + ); } } } - - const isDiscoveryAlbum = isDiscoveryByPath || isDiscoveryByJob || isDiscoveryArtist; + + const isDiscoveryAlbum = + isDiscoveryByPath || isDiscoveryByJob || isDiscoveryArtist; album = await prisma.album.create({ data: { @@ -709,10 +745,11 @@ export class MusicScannerService { } if (needsExtraction) { - const coverPath = await this.coverArtExtractor.extractCoverArt( - absolutePath, - album.id - ); + const coverPath = + await this.coverArtExtractor.extractCoverArt( + absolutePath, + album.id + ); if (coverPath) { await prisma.album.update({ where: { id: album.id }, @@ -721,10 +758,11 @@ export class MusicScannerService { } else { // No embedded art, try fetching from Deezer try { - const deezerCover = await deezerService.getAlbumCover( - artistName, - albumTitle - ); + const deezerCover = + await deezerService.getAlbumCover( + artistName, + albumTitle + ); if (deezerCover) { await 
prisma.album.update({ where: { id: album.id }, diff --git a/backend/src/services/musicbrainz.ts b/backend/src/services/musicbrainz.ts index a1723ce..da9accd 100644 --- a/backend/src/services/musicbrainz.ts +++ b/backend/src/services/musicbrainz.ts @@ -1,4 +1,5 @@ import axios, { AxiosInstance } from "axios"; +import { logger } from "../utils/logger"; import { redisClient } from "../utils/redis"; import { rateLimiter } from "./rateLimiter"; @@ -27,7 +28,7 @@ class MusicBrainzService { return JSON.parse(cached); } } catch (err) { - console.warn("Redis get error:", err); + logger.warn("Redis get error:", err); } // Use global rate limiter instead of local rate limiting @@ -39,7 +40,7 @@ class MusicBrainzService { const actualTtl = data === null ? 3600 : ttlSeconds; await redisClient.setEx(cacheKey, actualTtl, JSON.stringify(data)); } catch (err) { - console.warn("Redis set error:", err); + logger.warn("Redis set error:", err); } return data; @@ -343,10 +344,10 @@ class MusicBrainzService { const allRecordings = response.data.recordings || []; - console.log( + logger.debug( `[MusicBrainz] Query: "${trackTitle}" by "${artistName}"` ); - console.log( + logger.debug( `[MusicBrainz] Found ${allRecordings.length} total recordings` ); @@ -358,7 +359,7 @@ class MusicBrainzService { .slice(0, 2) .map((r: any) => r["release-group"]?.title || "?") .join(", "); - console.log( + logger.debug( ` ${i + 1}. 
[${disambig}] → ${ albumNames || "(no albums)" }` @@ -378,7 +379,7 @@ class MusicBrainzService { return true; }); - console.log( + logger.debug( `[MusicBrainz] After filtering live/demo: ${recordings.length} studio recordings` ); @@ -425,20 +426,28 @@ class MusicBrainzService { const strippedArtist = this.stripPunctuation(artistName); if (strippedTitle !== normalizedTitle) { - console.log(`[MusicBrainz] Trying punctuation-stripped search: "${strippedTitle}" by ${strippedArtist}`); + logger.debug( + `[MusicBrainz] Trying punctuation-stripped search: "${strippedTitle}" by ${strippedArtist}` + ); const strippedQuery = `${strippedTitle} AND artist:${strippedArtist}`; - const strippedResponse = await this.client.get("/recording", { - params: { - query: strippedQuery, - limit: 10, - fmt: "json", - inc: "releases+release-groups+artists", - }, - }); + const strippedResponse = await this.client.get( + "/recording", + { + params: { + query: strippedQuery, + limit: 10, + fmt: "json", + inc: "releases+release-groups+artists", + }, + } + ); - const strippedRecordings = strippedResponse.data.recordings || []; - console.log(`[MusicBrainz] Punctuation-stripped search found ${strippedRecordings.length} recordings`); + const strippedRecordings = + strippedResponse.data.recordings || []; + logger.debug( + `[MusicBrainz] Punctuation-stripped search found ${strippedRecordings.length} recordings` + ); for (const rec of strippedRecordings) { const recArtist = @@ -448,11 +457,18 @@ class MusicBrainzService { if ( recArtist .toLowerCase() - .includes(strippedArtist.toLowerCase().split(" ")[0]) + .includes( + strippedArtist + .toLowerCase() + .split(" ")[0] + ) ) { - const result = this.extractAlbumFromRecording(rec); + const result = + this.extractAlbumFromRecording(rec); if (result) { - console.log(`[MusicBrainz] ✓ Found via punctuation-stripped search: ${result.albumName}`); + logger.debug( + `[MusicBrainz] Found via punctuation-stripped search: ${result.albumName}` + ); return result; } 
} @@ -464,34 +480,45 @@ class MusicBrainzService { // Try each recording until we find one with a good (non-bootleg) album for (const rec of recordings) { - const disambig = rec.disambiguation || "(no disambiguation)"; - console.log(`[MusicBrainz] Trying recording: "${rec.title}" [${disambig}]`); + const disambig = + rec.disambiguation || "(no disambiguation)"; + logger.debug( + `[MusicBrainz] Trying recording: "${rec.title}" [${disambig}]` + ); const result = this.extractAlbumFromRecording(rec, false); if (result) { - console.log(`[MusicBrainz] ✓ Found album: "${result.albumName}" (MBID: ${result.albumMbid})`); + logger.debug( + `[MusicBrainz] Found album: "${result.albumName}" (MBID: ${result.albumMbid})` + ); return result; // Found a good album } else { - console.log(`[MusicBrainz] ✗ No valid album found for this recording`); + logger.debug( + `[MusicBrainz] No valid album found for this recording` + ); } } // Fallback: Try again accepting Singles/EPs as last resort - console.log(`[MusicBrainz] No official albums found, trying to find Singles/EPs...`); + logger.debug( + `[MusicBrainz] No official albums found, trying to find Singles/EPs...` + ); for (const rec of recordings) { const result = this.extractAlbumFromRecording(rec, true); if (result) { - console.log(`[MusicBrainz] ✓ Found Single/EP: "${result.albumName}" (MBID: ${result.albumMbid})`); + logger.debug( + `[MusicBrainz] Found Single/EP: "${result.albumName}" (MBID: ${result.albumMbid})` + ); return result; } } // No good albums found in any recording - console.log( + logger.debug( `[MusicBrainz] No official albums or singles found for "${trackTitle}" by ${artistName} (checked ${recordings.length} recordings)` ); return null; } catch (error: any) { - console.error( + logger.error( "MusicBrainz recording search error:", error.message ); @@ -505,7 +532,10 @@ class MusicBrainzService { * Prioritizes studio albums and filters out compilations, live albums, and bootlegs * @param allowSingles - If true, 
accepts Singles/EPs as a fallback (lower threshold) */ - private extractAlbumFromRecording(recording: any, allowSingles: boolean = false): { + private extractAlbumFromRecording( + recording: any, + allowSingles: boolean = false + ): { albumName: string; albumMbid: string; artistMbid: string; @@ -582,10 +612,12 @@ class MusicBrainzService { r.release["release-group"]?.title || r.release.title; return `"${title}" (${r.score})`; }); - console.log( + logger.debug( `[MusicBrainz] Skipping recording - no ${modeText} found in ${ releases.length - } releases (threshold: ${threshold}). Top scores: ${topScores.join(", ")}` + } releases (threshold: ${threshold}). Top scores: ${topScores.join( + ", " + )}` ); return null; } @@ -597,7 +629,7 @@ class MusicBrainzService { return null; } - console.log( + logger.debug( `[MusicBrainz] Selected "${releaseGroup.title}" (score: ${bestResult.score}) from ${releases.length} releases` ); @@ -614,14 +646,19 @@ class MusicBrainzService { * Clear cached recording search result * Useful for retrying failed lookups */ - async clearRecordingCache(trackTitle: string, artistName: string): Promise { + async clearRecordingCache( + trackTitle: string, + artistName: string + ): Promise { const cacheKey = `mb:search:recording:${artistName}:${trackTitle}`; try { await redisClient.del(cacheKey); - console.log(`[MusicBrainz] Cleared cache for: "${trackTitle}" by ${artistName}`); + logger.debug( + `[MusicBrainz] Cleared cache for: "${trackTitle}" by ${artistName}` + ); return true; } catch (err) { - console.warn("Redis del error:", err); + logger.warn("Redis del error:", err); return false; } } @@ -644,13 +681,91 @@ class MusicBrainzService { } } - console.log(`[MusicBrainz] Cleared ${cleared} stale null cache entries`); + logger.debug( + `[MusicBrainz] Cleared ${cleared} stale null cache entries` + ); return cleared; } catch (err) { - console.error("Error clearing stale caches:", err); + logger.error("Error clearing stale caches:", err); return 0; } } + 
+ /** + * Get track list for an album by release group MBID + * Uses the first official release from the release group + */ + async getAlbumTracks( + rgMbid: string + ): Promise> { + const cacheKey = `mb:albumtracks:${rgMbid}`; + + return this.cachedRequest(cacheKey, async () => { + try { + // Step 1: Get releases from the release group + const rgResponse = await this.client.get( + `/release-group/${rgMbid}`, + { + params: { + inc: "releases", + fmt: "json", + }, + } + ); + + const releases = rgResponse.data?.releases || []; + if (releases.length === 0) { + logger.debug( + `[MusicBrainz] No releases found for release group ${rgMbid}` + ); + return []; + } + + // Prefer official releases + const release = + releases.find((r: any) => r.status === "Official") || + releases[0]; + + // Step 2: Get full release details with recordings + const releaseResponse = await this.client.get( + `/release/${release.id}`, + { + params: { + inc: "recordings", + fmt: "json", + }, + } + ); + + const media = releaseResponse.data?.media || []; + const tracks: Array<{ + title: string; + position?: number; + duration?: number; + }> = []; + + for (const medium of media) { + for (const track of medium.tracks || []) { + tracks.push({ + title: track.title || track.recording?.title, + position: track.position, + duration: track.length || track.recording?.length, + }); + } + } + + logger.debug( + `[MusicBrainz] Found ${tracks.length} tracks for release group ${rgMbid}` + ); + return tracks; + } catch (error: any) { + logger.error( + `MusicBrainz getAlbumTracks error: ${error.message}` + ); + return []; + } + }); + } } export const musicBrainzService = new MusicBrainzService(); diff --git a/backend/src/services/notificationPolicyService.ts b/backend/src/services/notificationPolicyService.ts new file mode 100644 index 0000000..2f7fb50 --- /dev/null +++ b/backend/src/services/notificationPolicyService.ts @@ -0,0 +1,421 @@ +/** + * Notification Policy Service + * + * Intelligent notification 
filtering for download jobs. + * Suppresses intermediate failures during active retry cycles, + * only sending notifications for terminal states (completed/exhausted). + * + * State Machine: PENDING → PROCESSING → COMPLETED/EXHAUSTED + * + * Policy: + * - SUPPRESS: All failures during active retry window + * - SEND: Final success, permanent failure after retries exhausted + */ + +import { logger } from "../utils/logger"; +import { prisma } from "../utils/db"; + +interface NotificationDecision { + shouldNotify: boolean; + reason: string; + notificationType?: "download_complete" | "download_failed"; +} + +// Configuration constants +const DEFAULT_RETRY_WINDOW_MINUTES = 30; +const SUPPRESS_TRANSIENT_FAILURES = true; + +// Failure classification patterns +const TRANSIENT_PATTERNS = [ + "no sources found", + "no indexer results", + "no releases available", + "import failed", + "connection timeout", + "rate limited", + "temporarily unavailable", + "searching for alternative", + "download stuck", +]; + +const PERMANENT_PATTERNS = [ + "all releases exhausted", + "all albums exhausted", + "artist not found", + "download cancelled", + "album not found in lidarr", +]; + +const CRITICAL_PATTERNS = [ + "disk full", + "permission denied", + "lidarr unavailable", + "authentication failed", + "invalid api key", +]; + +type FailureClassification = "transient" | "permanent" | "critical"; + +class NotificationPolicyService { + /** + * Evaluate whether a notification should be sent for a download job. 
+ * + * @param jobId - The download job ID + * @param eventType - The type of event (complete, failed, retry, timeout) + * @returns Decision on whether to send notification + */ + async evaluateNotification( + jobId: string, + eventType: "complete" | "failed" | "retry" | "timeout" + ): Promise { + logger.debug( + `[NOTIFICATION-POLICY] Evaluating: ${jobId} (${eventType})` + ); + + // Fetch job with current state + const job = await prisma.downloadJob.findUnique({ + where: { id: jobId }, + }); + + if (!job) { + return { + shouldNotify: false, + reason: "Job not found", + }; + } + + const metadata = (job.metadata as any) || {}; + const downloadType = metadata.downloadType || "library"; + + // Discovery and Spotify Import jobs never send individual notifications + // (they send batch notifications instead) + if (downloadType === "discovery" || metadata.spotifyImportJobId) { + return { + shouldNotify: false, + reason: `${downloadType} download - batch notification only`, + }; + } + + // Check if notification already sent for this job + if (metadata.notificationSent === true) { + return { + shouldNotify: false, + reason: "Notification already sent for this job", + }; + } + + // Handle based on job status + switch (job.status) { + case "completed": + return await this.evaluateCompletedJob(job, eventType); + + case "processing": + return await this.evaluateProcessingJob(job, eventType); + + case "failed": + case "exhausted": + return await this.evaluateFailedJob(job, eventType); + + case "pending": + return { + shouldNotify: false, + reason: "Job not started yet", + }; + + default: + return { + shouldNotify: false, + reason: `Unknown status: ${job.status}`, + }; + } + } + + /** + * Evaluate notification for completed job + */ + private async evaluateCompletedJob( + job: any, + eventType: string + ): Promise { + if (eventType !== "complete") { + return { + shouldNotify: false, + reason: "Invalid event type for completed job", + }; + } + + // Check if another job for same 
album already notified + const hasOtherNotification = await this.hasAlreadyNotified(job); + if (hasOtherNotification) { + return { + shouldNotify: false, + reason: "Another job for same album already sent notification", + }; + } + + return { + shouldNotify: true, + reason: "Download completed successfully", + notificationType: "download_complete", + }; + } + + /** + * Evaluate notification for processing job + */ + private async evaluateProcessingJob( + job: any, + eventType: string + ): Promise { + // Processing jobs should never send notifications + // They're still in active retry window + if (eventType === "complete") { + return { + shouldNotify: false, + reason: "Job still processing - wait for status update to completed", + }; + } + + if (eventType === "failed" || eventType === "retry") { + // Check if in retry window + const inRetryWindow = await this.isInRetryWindow(job); + if (inRetryWindow) { + return { + shouldNotify: false, + reason: "Job in active retry window - suppressing notification", + }; + } + // Retry window expired but still processing - extend it + return { + shouldNotify: false, + reason: "Retry window expired but job still processing - extending timeout", + }; + } + + if (eventType === "timeout") { + const inRetryWindow = await this.isInRetryWindow(job); + if (inRetryWindow) { + return { + shouldNotify: false, + reason: "Still in retry window - extending timeout", + }; + } + // Timeout expired and out of retry window - let caller handle failure + return { + shouldNotify: false, + reason: "Timeout expired - caller should mark as failed", + }; + } + + return { + shouldNotify: false, + reason: "Processing job - no notification needed", + }; + } + + /** + * Evaluate notification for failed/exhausted job + */ + private async evaluateFailedJob( + job: any, + eventType: string + ): Promise { + if (eventType !== "failed" && eventType !== "timeout") { + return { + shouldNotify: false, + reason: "Invalid event type for failed job", + }; + } + + // 
Check if another job for same album already notified + const hasOtherNotification = await this.hasAlreadyNotified(job); + if (hasOtherNotification) { + return { + shouldNotify: false, + reason: "Another job for same album already sent notification", + }; + } + + // Classify the failure + const classification = this.classifyFailure( + job, + job.error || "Unknown error" + ); + + // Critical errors always notify + if (classification === "critical") { + return { + shouldNotify: true, + reason: "Critical error requires user intervention", + notificationType: "download_failed", + }; + } + + // Transient failures - suppress if configured + if (classification === "transient" && SUPPRESS_TRANSIENT_FAILURES) { + return { + shouldNotify: false, + reason: "Transient failure - suppressed (may succeed on retry)", + }; + } + + // Permanent failures or transient with suppress disabled + return { + shouldNotify: true, + reason: + classification === "permanent" + ? "Permanent failure after retries exhausted" + : "Failure notification (transient suppression disabled)", + notificationType: "download_failed", + }; + } + + /** + * Check if job is in active retry window + * A job is in retry window if: + * 1. Status is 'processing' + * 2. Started within the last RETRY_WINDOW_MINUTES + */ + private async isInRetryWindow(job: any): Promise { + if (job.status !== "processing") { + return false; + } + + const metadata = (job.metadata as any) || {}; + + // Get retry window duration (configurable per job or use default) + const retryWindowMinutes = + metadata.retryWindowMinutes || DEFAULT_RETRY_WINDOW_MINUTES; + + // Get start time + const startedAt = metadata.startedAt + ? 
new Date(metadata.startedAt) + : job.createdAt; + + // Calculate if window has expired + const windowMs = retryWindowMinutes * 60 * 1000; + const elapsed = Date.now() - startedAt.getTime(); + + if (elapsed > windowMs) { + logger.debug( + `[NOTIFICATION-POLICY] Retry window expired (${Math.round( + elapsed / 60000 + )}m > ${retryWindowMinutes}m)` + ); + return false; + } + + logger.debug( + `[NOTIFICATION-POLICY] In retry window (${Math.round( + elapsed / 60000 + )}m < ${retryWindowMinutes}m)` + ); + return true; + } + + /** + * Check if another job for the same artist+album has already sent a notification + * Prevents duplicate notifications when multiple jobs exist for same album + */ + private async hasAlreadyNotified(job: any): Promise { + const metadata = (job.metadata as any) || {}; + const artistName = metadata?.artistName?.toLowerCase().trim() || ""; + const albumTitle = metadata?.albumTitle?.toLowerCase().trim() || ""; + + if (!artistName || !albumTitle) { + return false; + } + + // Find other jobs for same album that have notified + const otherNotifiedJob = await prisma.downloadJob.findFirst({ + where: { + id: { not: job.id }, + userId: job.userId, + status: { in: ["completed", "failed", "exhausted"] }, + }, + }); + + if (otherNotifiedJob) { + const otherMeta = (otherNotifiedJob.metadata as any) || {}; + const otherArtist = + otherMeta?.artistName?.toLowerCase().trim() || ""; + const otherAlbum = + otherMeta?.albumTitle?.toLowerCase().trim() || ""; + + // Check if same album and notification was sent + if ( + otherArtist === artistName && + otherAlbum === albumTitle && + otherMeta?.notificationSent === true + ) { + logger.debug( + `[NOTIFICATION-POLICY] Found duplicate notification in job ${otherNotifiedJob.id}` + ); + return true; + } + } + + return false; + } + + /** + * Classify failure type based on error message + * @returns 'transient' | 'permanent' | 'critical' + */ + private classifyFailure(job: any, error: string): FailureClassification { + const 
errorLower = error.toLowerCase(); + + // Check critical patterns first + for (const pattern of CRITICAL_PATTERNS) { + if (errorLower.includes(pattern)) { + logger.debug( + `[NOTIFICATION-POLICY] Classified as CRITICAL: ${pattern}` + ); + return "critical"; + } + } + + // Check permanent patterns + for (const pattern of PERMANENT_PATTERNS) { + if (errorLower.includes(pattern)) { + logger.debug( + `[NOTIFICATION-POLICY] Classified as PERMANENT: ${pattern}` + ); + return "permanent"; + } + } + + // Check transient patterns + for (const pattern of TRANSIENT_PATTERNS) { + if (errorLower.includes(pattern)) { + logger.debug( + `[NOTIFICATION-POLICY] Classified as TRANSIENT: ${pattern}` + ); + return "transient"; + } + } + + // Default to transient if unknown + logger.debug( + `[NOTIFICATION-POLICY] Classified as TRANSIENT (default)` + ); + return "transient"; + } + + /** + * Get configuration for notification policy + * Can be extended to pull from user settings or system config + */ + getConfig(): { + retryWindowMinutes: number; + suppressTransientFailures: boolean; + } { + return { + retryWindowMinutes: DEFAULT_RETRY_WINDOW_MINUTES, + suppressTransientFailures: SUPPRESS_TRANSIENT_FAILURES, + }; + } +} + +// Singleton instance +export const notificationPolicyService = new NotificationPolicyService(); diff --git a/backend/src/services/notificationService.ts b/backend/src/services/notificationService.ts index d9c668b..714dc7a 100644 --- a/backend/src/services/notificationService.ts +++ b/backend/src/services/notificationService.ts @@ -1,4 +1,5 @@ import { PrismaClient } from "@prisma/client"; +import { logger } from "../utils/logger"; const prisma = new PrismaClient(); @@ -35,7 +36,7 @@ class NotificationService { }, }); - console.log( + logger.debug( `[NOTIFICATION] Created: ${type} - ${title} for user ${userId}` ); return notification; @@ -124,7 +125,7 @@ class NotificationService { }); if (result.count > 0) { - console.log( + logger.debug( `[NOTIFICATION] Cleaned up 
${result.count} old notifications` ); } diff --git a/backend/src/services/openai.ts b/backend/src/services/openai.ts index bdd0442..9310eba 100644 --- a/backend/src/services/openai.ts +++ b/backend/src/services/openai.ts @@ -1,4 +1,5 @@ import axios, { AxiosInstance } from "axios"; +import { logger } from "../utils/logger"; import { config } from "../config"; interface PlaylistTrack { @@ -131,14 +132,14 @@ Return ONLY valid JSON, no markdown formatting.`; return result.tracks || []; } catch (error: any) { - console.error( + logger.error( "OpenAI API error:", error.response?.data || error.message ); // Log the raw response content for debugging if (error instanceof SyntaxError) { - console.error("Failed to parse JSON response"); + logger.error("Failed to parse JSON response"); } throw new Error("Failed to generate playlist with AI"); @@ -175,7 +176,7 @@ Be concise and engaging (max 15 words).`; return response.data.choices[0].message.content.trim(); } catch (error) { - console.error("OpenAI enhancement error:", error); + logger.error("OpenAI enhancement error:", error); return "Recommended based on your listening history"; } } diff --git a/backend/src/services/podcastCache.ts b/backend/src/services/podcastCache.ts index 8ccd024..94decb1 100644 --- a/backend/src/services/podcastCache.ts +++ b/backend/src/services/podcastCache.ts @@ -1,4 +1,5 @@ import { prisma } from "../utils/db"; +import { logger } from "../utils/logger"; import fs from "fs/promises"; import path from "path"; import { config } from "../config"; @@ -40,7 +41,7 @@ export class PodcastCacheService { }; try { - console.log(" Starting podcast cover sync..."); + logger.debug(" Starting podcast cover sync..."); // Ensure cover cache directory exists await fs.mkdir(this.coverCacheDir, { recursive: true }); @@ -53,7 +54,7 @@ export class PodcastCacheService { }, }); - console.log( + logger.debug( `[PODCAST] Found ${podcasts.length} podcasts needing cover sync` ); @@ -72,7 +73,7 @@ export class 
PodcastCacheService { data: { localCoverPath: localPath }, }); result.synced++; - console.log(` Synced cover for: ${podcast.title}`); + logger.debug(` Synced cover for: ${podcast.title}`); } else { result.skipped++; } @@ -81,18 +82,18 @@ export class PodcastCacheService { result.failed++; const errorMsg = `Failed to sync cover for ${podcast.title}: ${error.message}`; result.errors.push(errorMsg); - console.error(` ✗ ${errorMsg}`); + logger.error(` ${errorMsg}`); } } - console.log("\nPodcast Cover Sync Summary:"); - console.log(` Synced: ${result.synced}`); - console.log(` Failed: ${result.failed}`); - console.log(` Skipped: ${result.skipped}`); + logger.debug("\nPodcast Cover Sync Summary:"); + logger.debug(` Synced: ${result.synced}`); + logger.debug(` Failed: ${result.failed}`); + logger.debug(` Skipped: ${result.skipped}`); return result; } catch (error: any) { - console.error(" Podcast cover sync failed:", error); + logger.error(" Podcast cover sync failed:", error); throw error; } } @@ -109,7 +110,7 @@ export class PodcastCacheService { }; try { - console.log(" Starting podcast episode cover sync..."); + logger.debug(" Starting podcast episode cover sync..."); await fs.mkdir(this.coverCacheDir, { recursive: true }); @@ -133,7 +134,7 @@ export class PodcastCacheService { (ep) => ep.imageUrl !== ep.podcast.imageUrl ); - console.log( + logger.debug( `[PODCAST] Found ${uniqueEpisodes.length} episodes with unique covers` ); @@ -152,7 +153,7 @@ export class PodcastCacheService { data: { localCoverPath: localPath }, }); result.synced++; - console.log( + logger.debug( ` Synced cover for episode: ${episode.title}` ); } else { @@ -163,18 +164,18 @@ export class PodcastCacheService { result.failed++; const errorMsg = `Failed to sync cover for episode ${episode.title}: ${error.message}`; result.errors.push(errorMsg); - console.error(` ✗ ${errorMsg}`); + logger.error(` ${errorMsg}`); } } - console.log("\nEpisode Cover Sync Summary:"); - console.log(` Synced: 
${result.synced}`); - console.log(` Failed: ${result.failed}`); - console.log(` Skipped: ${result.skipped}`); + logger.debug("\nEpisode Cover Sync Summary:"); + logger.debug(` Synced: ${result.synced}`); + logger.debug(` Failed: ${result.failed}`); + logger.debug(` Skipped: ${result.skipped}`); return result; } catch (error: any) { - console.error(" Episode cover sync failed:", error); + logger.error(" Episode cover sync failed:", error); throw error; } } @@ -204,7 +205,7 @@ export class PodcastCacheService { return filePath; } catch (error: any) { - console.error( + logger.error( `Failed to download cover for ${type} ${id}:`, error.message ); @@ -240,7 +241,7 @@ export class PodcastCacheService { if (!validCoverPaths.has(file)) { await fs.unlink(path.join(this.coverCacheDir, file)); deleted++; - console.log(` [DELETE] Deleted orphaned podcast cover: ${file}`); + logger.debug(` [DELETE] Deleted orphaned podcast cover: ${file}`); } } diff --git a/backend/src/services/podcastDownload.ts b/backend/src/services/podcastDownload.ts index 1bc0380..223b2ed 100644 --- a/backend/src/services/podcastDownload.ts +++ b/backend/src/services/podcastDownload.ts @@ -1,4 +1,5 @@ import { prisma } from "../utils/db"; +import { logger } from "../utils/logger"; import { config } from "../config"; import fs from "fs/promises"; import path from "path"; @@ -53,7 +54,7 @@ export function getDownloadProgress(episodeId: string): { progress: number; down export async function getCachedFilePath(episodeId: string): Promise { // Don't return cache path if still downloading - file may be incomplete if (downloadingEpisodes.has(episodeId)) { - console.log(`[PODCAST-DL] Episode ${episodeId} is still downloading, not using cache`); + logger.debug(`[PODCAST-DL] Episode ${episodeId} is still downloading, not using cache`); return null; } @@ -78,7 +79,7 @@ export async function getCachedFilePath(episodeId: string): Promise 0.01) { - console.log( + logger.debug( `[PODCAST-DL] Episode size mismatch vs 
episode.fileSize for ${episodeId}: actual ${actual} vs expected ${expected} (variance ${Math.round( variance * 100 )}%), deleting cache` @@ -101,7 +102,7 @@ export async function getCachedFilePath(episodeId: string): Promise {}); return null; } @@ -112,7 +113,7 @@ export async function getCachedFilePath(episodeId: string): Promise 0 && variance > 0.01) { - console.log(`[PODCAST-DL] Size mismatch for ${episodeId}: actual ${actualSize} vs expected ${Math.round(expectedSize)}, deleting`); + logger.debug(`[PODCAST-DL] Size mismatch for ${episodeId}: actual ${actualSize} vs expected ${Math.round(expectedSize)}, deleting`); await fs.unlink(cachedPath).catch(() => {}); await prisma.podcastDownload.deleteMany({ where: { episodeId } }); return null; @@ -124,7 +125,7 @@ export async function getCachedFilePath(episodeId: string): Promise { - console.error(`[PODCAST-DL] Background download failed for ${episodeId}:`, err.message); + logger.error(`[PODCAST-DL] Background download failed for ${episodeId}:`, err.message); }) .finally(() => { downloadingEpisodes.delete(episodeId); @@ -171,7 +172,7 @@ async function performDownload( attempt: number = 1 ): Promise { const maxAttempts = 3; - console.log(`[PODCAST-DL] Starting background download for episode ${episodeId} (attempt ${attempt}/${maxAttempts})`); + logger.debug(`[PODCAST-DL] Starting background download for episode ${episodeId} (attempt ${attempt}/${maxAttempts})`); const cacheDir = getPodcastCacheDir(); @@ -187,7 +188,7 @@ async function performDownload( const existingCached = await getCachedFilePath(episodeId); downloadingEpisodes.add(episodeId); // Re-add if (existingCached) { - console.log(`[PODCAST-DL] Episode ${episodeId} already cached, skipping download`); + logger.debug(`[PODCAST-DL] Episode ${episodeId} already cached, skipping download`); return; } @@ -247,7 +248,7 @@ async function performDownload( } catch {} } - console.log( + logger.debug( `[PODCAST-DL] Downloading ${episodeId} (${expectedBytes > 0 ? 
Math.round(expectedBytes / 1024 / 1024) : 0}MB)` ); @@ -271,7 +272,7 @@ async function performDownload( const now = Date.now(); if (now - lastLogTime > 30000) { const percent = contentLength > 0 ? Math.round((bytesDownloaded / contentLength) * 100) : 0; - console.log(`[PODCAST-DL] Download progress ${episodeId}: ${percent}% (${Math.round(bytesDownloaded / 1024 / 1024)}MB)`); + logger.debug(`[PODCAST-DL] Download progress ${episodeId}: ${percent}% (${Math.round(bytesDownloaded / 1024 / 1024)}MB)`); lastLogTime = now; } }); @@ -312,7 +313,7 @@ async function performDownload( const variance = Math.abs(stats.size - expectedBytes) / expectedBytes; if (variance > 0.01) { const percentComplete = Math.round((stats.size / expectedBytes) * 100); - console.error(`[PODCAST-DL] Incomplete download for ${episodeId}: ${stats.size}/${expectedBytes} bytes (${percentComplete}%)`); + logger.error(`[PODCAST-DL] Incomplete download for ${episodeId}: ${stats.size}/${expectedBytes} bytes (${percentComplete}%)`); await fs.unlink(tempPath).catch(() => {}); throw new Error(`Download incomplete: got ${stats.size} bytes, expected ${expectedBytes}`); } @@ -344,7 +345,7 @@ async function performDownload( } }); - console.log(`[PODCAST-DL] Successfully cached episode ${episodeId} (${fileSizeMb.toFixed(1)}MB)`); + logger.debug(`[PODCAST-DL] Successfully cached episode ${episodeId} (${fileSizeMb.toFixed(1)}MB)`); // Clean up progress tracking downloadProgress.delete(episodeId); @@ -356,7 +357,7 @@ async function performDownload( // Retry on failure if (attempt < maxAttempts) { - console.log(`[PODCAST-DL] Download failed (attempt ${attempt}), retrying in 5s: ${error.message}`); + logger.debug(`[PODCAST-DL] Download failed (attempt ${attempt}), retrying in 5s: ${error.message}`); await new Promise(resolve => setTimeout(resolve, 5000)); return performDownload(episodeId, audioUrl, userId, attempt + 1); } @@ -370,7 +371,7 @@ async function performDownload( * Should be called periodically (e.g., daily) 
*/ export async function cleanupExpiredCache(): Promise<{ deleted: number; freedMb: number }> { - console.log('[PODCAST-DL] Starting cache cleanup...'); + logger.debug('[PODCAST-DL] Starting cache cleanup...'); const thirtyDaysAgo = new Date(); thirtyDaysAgo.setDate(thirtyDaysAgo.getDate() - 30); @@ -398,13 +399,13 @@ export async function cleanupExpiredCache(): Promise<{ deleted: number; freedMb: deleted++; freedMb += download.fileSizeMb; - console.log(`[PODCAST-DL] Deleted expired cache: ${path.basename(download.localPath)}`); + logger.debug(`[PODCAST-DL] Deleted expired cache: ${path.basename(download.localPath)}`); } catch (err: any) { - console.error(`[PODCAST-DL] Failed to delete ${download.localPath}:`, err.message); + logger.error(`[PODCAST-DL] Failed to delete ${download.localPath}:`, err.message); } } - console.log(`[PODCAST-DL] Cleanup complete: ${deleted} files deleted, ${freedMb.toFixed(1)}MB freed`); + logger.debug(`[PODCAST-DL] Cleanup complete: ${deleted} files deleted, ${freedMb.toFixed(1)}MB freed`); return { deleted, freedMb }; } diff --git a/backend/src/services/programmaticPlaylists.ts b/backend/src/services/programmaticPlaylists.ts index 8d5aea2..b2b91df 100644 --- a/backend/src/services/programmaticPlaylists.ts +++ b/backend/src/services/programmaticPlaylists.ts @@ -1,6 +1,12 @@ import { prisma } from "../utils/db"; +import { logger } from "../utils/logger"; import { lastFmService } from "./lastfm"; import { moodBucketService } from "./moodBucketService"; +import { + getDecadeWhereClause, + getEffectiveYear, + getDecadeFromYear, +} from "../utils/dateFilters"; export interface ProgrammaticMix { id: string; @@ -109,10 +115,14 @@ function getMixColor(type: string): string { return MIX_COLORS[type] || MIX_COLORS["default"]; } -// Helper to randomly sample from array +// Helper to randomly sample from array using Fisher-Yates shuffle function randomSample(array: T[], count: number): T[] { - const shuffled = [...array].sort(() => Math.random() - 
0.5); - return shuffled.slice(0, count); + const result = [...array]; + for (let i = result.length - 1; i > 0; i--) { + const j = Math.floor(Math.random() * (i + 1)); + [result[i], result[j]] = [result[j], result[i]]; + } + return result.slice(0, count); } // Helper to get seeded random number for daily consistency @@ -129,7 +139,14 @@ function getSeededRandom(seed: string): number { // Type for track with album cover type TrackWithAlbumCover = { id: string; - album: { coverUrl: string | null; genres?: unknown }; + album: { + coverUrl: string | null; + genres?: unknown; + userGenres?: string[] | null; + artist?: { + userGenres?: string[] | null; + }; + }; lastfmTags?: string[]; essentiaGenres?: string[]; [key: string]: unknown; @@ -154,30 +171,71 @@ async function findTracksByGenrePatterns( { essentiaGenres: { hasSome: tagPatterns } }, ], }, - include: { album: { select: { coverUrl: true, genres: true } } }, + include: { + album: { + select: { + coverUrl: true, + genres: true, + userGenres: true, + artist: { + select: { + userGenres: true, + }, + }, + }, + }, + }, take: limit, }); if (tracks.length >= 15) { - return tracks; + return tracks as TrackWithAlbumCover[]; } - // Strategy 2: Query albums with non-empty genres and filter in memory + // Strategy 2: Query albums with non-empty genres (canonical or user) and filter in memory const albumTracks = await prisma.track.findMany({ where: { album: { - genres: { not: { equals: null } }, + OR: [ + { genres: { not: { equals: null } } }, + { userGenres: { not: { equals: null } } }, + ], + }, + }, + include: { + album: { + select: { + coverUrl: true, + genres: true, + userGenres: true, + artist: { + select: { + userGenres: true, + }, + }, + }, }, }, - include: { album: { select: { coverUrl: true, genres: true } } }, take: limit * 3, // Get more to filter down }); // Filter by genre patterns (case-insensitive partial match) + // Merge canonical and user genres from both album and artist const genreMatched = 
albumTracks.filter((t) => { const albumGenres = t.album.genres as string[] | null; - if (!albumGenres || !Array.isArray(albumGenres)) return false; - return albumGenres.some((ag) => + const albumUserGenres = (t.album.userGenres as string[] | null) || []; + const artistUserGenres = (t.album.artist?.userGenres as string[] | null) || []; + + // Merge all genres + const allGenres = [ + ...(albumGenres || []), + ...albumUserGenres, + ...artistUserGenres, + ]; + + if (allGenres.length === 0) return false; + + return allGenres.some((ag) => genrePatterns.some((gp) => ag.toLowerCase().includes(gp.toLowerCase()) ) @@ -191,7 +249,7 @@ async function findTracksByGenrePatterns( ...genreMatched.filter((t) => !existingIds.has(t.id)), ]; - return merged.slice(0, limit); + return merged.slice(0, limit) as TrackWithAlbumCover[]; } export class ProgrammaticPlaylistService { @@ -218,7 +276,7 @@ export class ProgrammaticPlaylistService { : `${today}-${userId}`; const dateSeed = getSeededRandom(seedString); - console.log( + logger.debug( `[MIXES] Generating mixes for user ${userId}, forceRandom: ${forceRandom}, seed: ${dateSeed}` ); @@ -444,7 +502,7 @@ export class ProgrammaticPlaylistService { const selectedIndices: number[] = []; let seed = dateSeed; - console.log( + logger.debug( `[MIXES] Selecting ${this.DAILY_MIX_COUNT} mixes from ${mixGenerators.length} types...` ); @@ -453,33 +511,33 @@ export class ProgrammaticPlaylistService { const index = seed % mixGenerators.length; if (!selectedIndices.includes(index)) { selectedIndices.push(index); - console.log( + logger.debug( `[MIXES] Selected index ${index}: ${mixGenerators[index].name}` ); } } - console.log( + logger.debug( `[MIXES] Final selected indices: [${selectedIndices.join(", ")}]` ); // Generate selected mixes const mixPromises = selectedIndices.map((i) => { - console.log(`[MIXES] Generating ${mixGenerators[i].name}...`); + logger.debug(`[MIXES] Generating ${mixGenerators[i].name}...`); return mixGenerators[i].fn(); }); const 
mixes = await Promise.all(mixPromises); - console.log(`[MIXES] Generated ${mixes.length} mixes before filtering`); + logger.debug(`[MIXES] Generated ${mixes.length} mixes before filtering`); mixes.forEach((mix, i) => { if (mix === null) { - console.log( + logger.debug( `[MIXES] Mix ${i} (${ mixGenerators[selectedIndices[i]].name }) returned NULL` ); } else { - console.log( + logger.debug( `[MIXES] Mix ${i}: ${mix.name} (${mix.trackCount} tracks)` ); } @@ -489,13 +547,13 @@ export class ProgrammaticPlaylistService { let finalMixes = mixes.filter( (mix): mix is ProgrammaticMix => mix !== null ); - console.log( + logger.debug( `[MIXES] Returning ${finalMixes.length} mixes after filtering nulls` ); // If we don't have 5 mixes, try to fill gaps with successful generators if (finalMixes.length < this.DAILY_MIX_COUNT) { - console.log( + logger.debug( `[MIXES] Only got ${finalMixes.length} mixes, trying to fill gaps...` ); @@ -510,34 +568,34 @@ export class ProgrammaticPlaylistService { i++ ) { if (!attemptedIndices.has(i)) { - console.log( + logger.debug( `[MIXES] Attempting fallback: ${mixGenerators[i].name}` ); const fallbackMix = await mixGenerators[i].fn(); if (fallbackMix && !successfulTypes.has(fallbackMix.type)) { finalMixes.push(fallbackMix); successfulTypes.add(fallbackMix.type); - console.log( + logger.debug( `[MIXES] Fallback succeeded: ${fallbackMix.name}` ); } } } - console.log(`[MIXES] After fallbacks: ${finalMixes.length} mixes`); + logger.debug(`[MIXES] After fallbacks: ${finalMixes.length} mixes`); } // Check if user has saved mood mix from the new bucket system (fast lookup) try { const savedMoodMix = await moodBucketService.getUserMoodMix(userId); if (savedMoodMix) { - console.log( + logger.debug( `[MIXES] User has saved mood mix: "${savedMoodMix.name}" with ${savedMoodMix.trackCount} tracks` ); finalMixes.push(savedMoodMix); } } catch (err) { - console.error("[MIXES] Error getting user's saved mood mix:", err); + logger.error("[MIXES] Error getting 
user's saved mood mix:", err); } return finalMixes; @@ -553,13 +611,14 @@ export class ProgrammaticPlaylistService { // Get all decades const albums = await prisma.album.findMany({ where: { tracks: { some: {} } }, - select: { year: true }, + select: { year: true, originalYear: true, displayYear: true }, }); const decades = new Set(); albums.forEach((album) => { - if (album.year) { - const decade = Math.floor(album.year / 10) * 10; + const effectiveYear = getEffectiveYear(album); + if (effectiveYear) { + const decade = getDecadeFromYear(effectiveYear); decades.add(decade); } }); @@ -574,9 +633,7 @@ export class ProgrammaticPlaylistService { // Get ALL tracks from this decade const tracks = await prisma.track.findMany({ where: { - album: { - year: { gte: selectedDecade, lt: selectedDecade + 10 }, - }, + album: getDecadeWhereClause(selectedDecade), }, include: { album: { select: { coverUrl: true } }, @@ -622,13 +679,13 @@ export class ProgrammaticPlaylistService { take: 20, }); - console.log(`[GENRE MIX] Found ${genres.length} genres total`); + logger.debug(`[GENRE MIX] Found ${genres.length} genres total`); const validGenres = genres.filter((g) => g._count.trackGenres >= 5); - console.log( + logger.debug( `[GENRE MIX] ${validGenres.length} genres have >= 5 tracks` ); if (validGenres.length === 0) { - console.log(`[GENRE MIX] FAILED: No genres with enough tracks`); + logger.debug(`[GENRE MIX] FAILED: No genres with enough tracks`); return null; } @@ -684,11 +741,11 @@ export class ProgrammaticPlaylistService { take: this.TRACK_LIMIT, }); - console.log( + logger.debug( `[TOP TRACKS MIX] Found ${playStats.length} unique played tracks` ); if (playStats.length < 5) { - console.log( + logger.debug( `[TOP TRACKS MIX] FAILED: Only ${playStats.length} tracks (need at least 5)` ); return null; @@ -796,11 +853,11 @@ export class ProgrammaticPlaylistService { }, }); - console.log( + logger.debug( `[ARTIST SIMILAR MIX] Found ${recentPlays.length} plays in last 7 days` ); if 
(recentPlays.length === 0) { - console.log(`[ARTIST SIMILAR MIX] FAILED: No plays in last 7 days`); + logger.debug(`[ARTIST SIMILAR MIX] FAILED: No plays in last 7 days`); return null; } @@ -824,13 +881,13 @@ export class ProgrammaticPlaylistService { }); if (!topArtist || !topArtist.name) { - console.log( + logger.debug( `[ARTIST SIMILAR MIX] FAILED: Top artist not found or has no name` ); return null; } - console.log(`[ARTIST SIMILAR MIX] Top artist: ${topArtist.name}`); + logger.debug(`[ARTIST SIMILAR MIX] Top artist: ${topArtist.name}`); // Get similar artists from Last.fm try { @@ -839,7 +896,7 @@ export class ProgrammaticPlaylistService { "10" ); - console.log( + logger.debug( `[ARTIST SIMILAR MIX] Last.fm returned ${similarArtists.length} similar artists` ); @@ -859,7 +916,7 @@ export class ProgrammaticPlaylistService { }, }); - console.log( + logger.debug( `[ARTIST SIMILAR MIX] Found ${artistsInLibrary.length} similar artists in library` ); @@ -867,12 +924,12 @@ export class ProgrammaticPlaylistService { artist.albums.flatMap((album) => album.tracks) ); - console.log( + logger.debug( `[ARTIST SIMILAR MIX] Total tracks from similar artists: ${tracks.length}` ); if (tracks.length < 5) { - console.log( + logger.debug( `[ARTIST SIMILAR MIX] FAILED: Only ${tracks.length} tracks (need at least 5)` ); return null; @@ -895,7 +952,7 @@ export class ProgrammaticPlaylistService { color: getMixColor("artist-similar"), }; } catch (error) { - console.error("Failed to generate artist similar mix:", error); + logger.error("Failed to generate artist similar mix:", error); return null; } } @@ -994,7 +1051,7 @@ export class ProgrammaticPlaylistService { }, }); tracks = genres.flatMap((g) => g.trackGenres.map((tg) => tg.track)); - console.log( + logger.debug( `[PARTY MIX] Found ${tracks.length} tracks from Genre table` ); @@ -1009,7 +1066,7 @@ export class ProgrammaticPlaylistService { ...tracks, ...albumGenreTracks.filter((t) => !existingIds.has(t.id)), ]; - console.log( + 
logger.debug( `[PARTY MIX] After album genre fallback: ${tracks.length} tracks` ); } @@ -1037,13 +1094,13 @@ export class ProgrammaticPlaylistService { ...tracks, ...audioTracks.filter((t) => !existingIds.has(t.id)), ]; - console.log( + logger.debug( `[PARTY MIX] After audio analysis fallback: ${tracks.length} tracks` ); } if (tracks.length < 15) { - console.log( + logger.debug( `[PARTY MIX] FAILED: Only ${tracks.length} tracks found` ); return null; @@ -1099,11 +1156,11 @@ export class ProgrammaticPlaylistService { take: 100, }); - console.log(`[CHILL MIX] Enhanced mode: Found ${tracks.length} tracks`); + logger.debug(`[CHILL MIX] Enhanced mode: Found ${tracks.length} tracks`); // Strategy 2: Standard mode fallback if (tracks.length < this.MIN_TRACKS_DAILY) { - console.log(`[CHILL MIX] Falling back to Standard mode`); + logger.debug(`[CHILL MIX] Falling back to Standard mode`); tracks = await prisma.track.findMany({ where: { analysisStatus: "completed", @@ -1125,17 +1182,17 @@ export class ProgrammaticPlaylistService { include: { album: { select: { coverUrl: true } } }, take: 100, }); - console.log( + logger.debug( `[CHILL MIX] Standard mode: Found ${tracks.length} tracks` ); } - console.log( + logger.debug( `[CHILL MIX] Total: ${tracks.length} tracks matching criteria` ); if (tracks.length < this.MIN_TRACKS_DAILY) { - console.log( + logger.debug( `[CHILL MIX] FAILED: Only ${tracks.length} tracks (need ${this.MIN_TRACKS_DAILY})` ); return null; @@ -1222,13 +1279,13 @@ export class ProgrammaticPlaylistService { take: 100, }); tracks = enhancedTracks; - console.log( + logger.debug( `[WORKOUT MIX] Enhanced mode: Found ${tracks.length} tracks` ); // Strategy 2: Standard mode fallback - audio analysis if (tracks.length < 15) { - console.log(`[WORKOUT MIX] Falling back to Standard mode`); + logger.debug(`[WORKOUT MIX] Falling back to Standard mode`); const audioTracks = await prisma.track.findMany({ where: { analysisStatus: "completed", @@ -1259,7 +1316,7 @@ export 
class ProgrammaticPlaylistService { ...tracks, ...audioTracks.filter((t) => !existingIds.has(t.id)), ]; - console.log( + logger.debug( `[WORKOUT MIX] Standard mode: Total ${tracks.length} tracks` ); } @@ -1289,7 +1346,7 @@ export class ProgrammaticPlaylistService { ...tracks, ...genreTracks.filter((t) => !existingIds.has(t.id)), ]; - console.log( + logger.debug( `[WORKOUT MIX] After Genre table: ${tracks.length} tracks` ); } @@ -1305,13 +1362,13 @@ export class ProgrammaticPlaylistService { ...tracks, ...albumGenreTracks.filter((t) => !existingIds.has(t.id)), ]; - console.log( + logger.debug( `[WORKOUT MIX] After album genre fallback: ${tracks.length} tracks` ); } if (tracks.length < 15) { - console.log( + logger.debug( `[WORKOUT MIX] FAILED: Only ${tracks.length} tracks found` ); return null; @@ -1383,7 +1440,7 @@ export class ProgrammaticPlaylistService { }, }); tracks = genres.flatMap((g) => g.trackGenres.map((tg) => tg.track)); - console.log( + logger.debug( `[FOCUS MIX] Found ${tracks.length} tracks from Genre table` ); @@ -1398,7 +1455,7 @@ export class ProgrammaticPlaylistService { ...tracks, ...albumGenreTracks.filter((t) => !existingIds.has(t.id)), ]; - console.log( + logger.debug( `[FOCUS MIX] After album genre fallback: ${tracks.length} tracks` ); } @@ -1419,13 +1476,13 @@ export class ProgrammaticPlaylistService { ...tracks, ...audioTracks.filter((t) => !existingIds.has(t.id)), ]; - console.log( + logger.debug( `[FOCUS MIX] After audio analysis fallback: ${tracks.length} tracks` ); } if (tracks.length < 15) { - console.log( + logger.debug( `[FOCUS MIX] FAILED: Only ${tracks.length} tracks found` ); return null; @@ -1482,7 +1539,7 @@ export class ProgrammaticPlaylistService { take: 100, }); tracks = audioTracks; - console.log( + logger.debug( `[HIGH ENERGY MIX] Found ${tracks.length} tracks from audio analysis` ); @@ -1507,13 +1564,13 @@ export class ProgrammaticPlaylistService { ...tracks, ...albumGenreTracks.filter((t) => !existingIds.has(t.id)), ]; - 
console.log( + logger.debug( `[HIGH ENERGY MIX] After genre fallback: ${tracks.length} tracks` ); } if (tracks.length < 15) { - console.log( + logger.debug( `[HIGH ENERGY MIX] FAILED: Only ${tracks.length} tracks found` ); return null; @@ -1573,13 +1630,13 @@ export class ProgrammaticPlaylistService { take: 100, }); - console.log( + logger.debug( `[LATE NIGHT MIX] Enhanced mode: Found ${tracks.length} tracks` ); // Fallback to Standard mode if not enough Enhanced tracks if (tracks.length < this.MIN_TRACKS_DAILY) { - console.log(`[LATE NIGHT MIX] Falling back to Standard mode`); + logger.debug(`[LATE NIGHT MIX] Falling back to Standard mode`); tracks = await prisma.track.findMany({ where: { analysisStatus: "completed", @@ -1601,18 +1658,18 @@ export class ProgrammaticPlaylistService { include: { album: { select: { coverUrl: true } } }, take: 100, }); - console.log( + logger.debug( `[LATE NIGHT MIX] Standard mode: Found ${tracks.length} tracks` ); } - console.log( + logger.debug( `[LATE NIGHT MIX] Total: ${tracks.length} tracks matching criteria` ); // No fallback padding - if not enough truly mellow tracks, don't generate if (tracks.length < this.MIN_TRACKS_DAILY) { - console.log( + logger.debug( `[LATE NIGHT MIX] FAILED: Only ${tracks.length} tracks (need ${this.MIN_TRACKS_DAILY})` ); return null; @@ -1672,7 +1729,7 @@ export class ProgrammaticPlaylistService { take: 100, }); tracks = enhancedTracks; - console.log(`[HAPPY MIX] Enhanced mode: Found ${tracks.length} tracks`); + logger.debug(`[HAPPY MIX] Enhanced mode: Found ${tracks.length} tracks`); // Strategy 2: Standard mode fallback - valence/energy heuristics if (tracks.length < 15) { @@ -1690,7 +1747,7 @@ export class ProgrammaticPlaylistService { ...tracks, ...standardTracks.filter((t) => !existingIds.has(t.id)), ]; - console.log( + logger.debug( `[HAPPY MIX] After Standard fallback: ${tracks.length} tracks` ); } @@ -1715,13 +1772,13 @@ export class ProgrammaticPlaylistService { ...tracks, 
...albumGenreTracks.filter((t) => !existingIds.has(t.id)), ]; - console.log( + logger.debug( `[HAPPY MIX] After genre fallback: ${tracks.length} tracks` ); } if (tracks.length < 15) { - console.log( + logger.debug( `[HAPPY MIX] FAILED: Only ${tracks.length} tracks found` ); return null; @@ -1774,7 +1831,7 @@ export class ProgrammaticPlaylistService { include: { album: { select: { coverUrl: true } } }, take: 150, }); - console.log( + logger.debug( `[MELANCHOLY MIX] Enhanced mode: Found ${enhancedTracks.length} tracks` ); @@ -1782,7 +1839,7 @@ export class ProgrammaticPlaylistService { tracks = enhancedTracks; } else { // Strategy 2: Standard mode fallback - console.log(`[MELANCHOLY MIX] Falling back to Standard mode`); + logger.debug(`[MELANCHOLY MIX] Falling back to Standard mode`); const audioTracks = await prisma.track.findMany({ where: { analysisStatus: "completed", @@ -1792,7 +1849,7 @@ export class ProgrammaticPlaylistService { include: { album: { select: { coverUrl: true } } }, take: 150, }); - console.log( + logger.debug( `[MELANCHOLY MIX] Standard mode: Found ${audioTracks.length} low-valence tracks` ); @@ -1820,7 +1877,7 @@ export class ProgrammaticPlaylistService { ); return hasMinorKey || hasSadTags || hasLastfmSadTags; }); - console.log( + logger.debug( `[MELANCHOLY MIX] After tag filter: ${tracks.length} tracks` ); } @@ -1844,14 +1901,14 @@ export class ProgrammaticPlaylistService { ...tracks, ...albumGenreTracks.filter((t) => !existingIds.has(t.id)), ]; - console.log( + logger.debug( `[MELANCHOLY MIX] After genre fallback: ${tracks.length} tracks` ); } // Require minimum 15 tracks for a meaningful playlist if (tracks.length < 15) { - console.log( + logger.debug( `[MELANCHOLY MIX] FAILED: Only ${tracks.length} tracks found` ); return null; @@ -1919,7 +1976,7 @@ export class ProgrammaticPlaylistService { take: 100, }); tracks = audioTracks; - console.log( + logger.debug( `[DANCE FLOOR MIX] Found ${tracks.length} tracks from audio analysis` ); @@ 
-1943,13 +2000,13 @@ export class ProgrammaticPlaylistService { ...tracks, ...albumGenreTracks.filter((t) => !existingIds.has(t.id)), ]; - console.log( + logger.debug( `[DANCE FLOOR MIX] After genre fallback: ${tracks.length} tracks` ); } if (tracks.length < 15) { - console.log( + logger.debug( `[DANCE FLOOR MIX] FAILED: Only ${tracks.length} tracks found` ); return null; @@ -2002,7 +2059,7 @@ export class ProgrammaticPlaylistService { take: 100, }); tracks = audioTracks; - console.log( + logger.debug( `[ACOUSTIC MIX] Found ${tracks.length} tracks from audio analysis` ); @@ -2024,13 +2081,13 @@ export class ProgrammaticPlaylistService { ...tracks, ...albumGenreTracks.filter((t) => !existingIds.has(t.id)), ]; - console.log( + logger.debug( `[ACOUSTIC MIX] After genre fallback: ${tracks.length} tracks` ); } if (tracks.length < 15) { - console.log( + logger.debug( `[ACOUSTIC MIX] FAILED: Only ${tracks.length} tracks found` ); return null; @@ -2083,7 +2140,7 @@ export class ProgrammaticPlaylistService { take: 100, }); tracks = audioTracks; - console.log( + logger.debug( `[INSTRUMENTAL MIX] Found ${tracks.length} tracks from audio analysis` ); @@ -2106,13 +2163,13 @@ export class ProgrammaticPlaylistService { ...tracks, ...albumGenreTracks.filter((t) => !existingIds.has(t.id)), ]; - console.log( + logger.debug( `[INSTRUMENTAL MIX] After genre fallback: ${tracks.length} tracks` ); } if (tracks.length < 15) { - console.log( + logger.debug( `[INSTRUMENTAL MIX] FAILED: Only ${tracks.length} tracks found` ); return null; @@ -2226,7 +2283,7 @@ export class ProgrammaticPlaylistService { take: 100, }); tracks = taggedTracks; - console.log(`[ROAD TRIP MIX] Found ${tracks.length} tracks from tags`); + logger.debug(`[ROAD TRIP MIX] Found ${tracks.length} tracks from tags`); // Strategy 2: Audio analysis (medium-high energy, good tempo) if (tracks.length < 15) { @@ -2244,7 +2301,7 @@ export class ProgrammaticPlaylistService { ...tracks, ...audioTracks.filter((t) => 
!existingIds.has(t.id)), ]; - console.log( + logger.debug( `[ROAD TRIP MIX] After audio fallback: ${tracks.length} tracks` ); } @@ -2267,13 +2324,13 @@ export class ProgrammaticPlaylistService { ...tracks, ...albumGenreTracks.filter((t) => !existingIds.has(t.id)), ]; - console.log( + logger.debug( `[ROAD TRIP MIX] After genre fallback: ${tracks.length} tracks` ); } if (tracks.length < 15) { - console.log( + logger.debug( `[ROAD TRIP MIX] FAILED: Only ${tracks.length} tracks found` ); return null; @@ -3582,7 +3639,7 @@ export class ProgrammaticPlaylistService { useEnhancedMode = true; } else { // Not enough enhanced tracks - convert ML mood params to basic audio feature equivalents - console.log( + logger.debug( `[MoodMixer] Only ${enhancedCount} enhanced tracks, falling back to basic features` ); diff --git a/backend/src/services/rateLimiter.ts b/backend/src/services/rateLimiter.ts index 4a7bcad..106185c 100644 --- a/backend/src/services/rateLimiter.ts +++ b/backend/src/services/rateLimiter.ts @@ -6,6 +6,7 @@ */ import PQueue from "p-queue"; +import { logger } from "../utils/logger"; interface RateLimitConfig { /** Requests per interval */ @@ -81,6 +82,7 @@ class GlobalRateLimiter { private circuitBreakers: Map = new Map(); private globalPaused = false; private globalPauseUntil = 0; + private concurrencyMultiplier = 1; // 1-5 multiplier for user-configurable speed constructor() { // Initialize queues for each service @@ -103,7 +105,7 @@ class GlobalRateLimiter { }); } - console.log("Global rate limiter initialized"); + logger.debug("Global rate limiter initialized"); } /** @@ -127,7 +129,7 @@ class GlobalRateLimiter { // Check global pause if (this.globalPaused && Date.now() < this.globalPauseUntil) { const waitTime = this.globalPauseUntil - Date.now(); - console.log(`Global rate limit pause - waiting ${waitTime}ms`); + logger.debug(`Global rate limit pause - waiting ${waitTime}ms`); await this.sleep(waitTime); } @@ -138,7 +140,7 @@ class GlobalRateLimiter { if 
(elapsed < circuit.resetAfterMs) { // Circuit is open, wait or throw const waitTime = circuit.resetAfterMs - elapsed; - console.log( + logger.debug( `Circuit breaker open for ${service} - waiting ${waitTime}ms` ); await this.sleep(waitTime); @@ -183,7 +185,7 @@ class GlobalRateLimiter { config.baseDelay, error ); - console.warn( + logger.warn( `Rate limited by ${service} (attempt ${attempt + 1}/${ maxRetries + 1 }) - backing off ${delay}ms` @@ -197,7 +199,7 @@ class GlobalRateLimiter { 60000, circuit.resetAfterMs * 2 ); - console.warn( + logger.warn( `Circuit breaker opened for ${service} - will reset in ${circuit.resetAfterMs}ms` ); } @@ -245,7 +247,7 @@ class GlobalRateLimiter { pauseAll(durationMs: number) { this.globalPaused = true; this.globalPauseUntil = Date.now() + durationMs; - console.warn(`Global rate limiter paused for ${durationMs}ms`); + logger.warn(`Global rate limiter paused for ${durationMs}ms`); } /** @@ -254,7 +256,7 @@ class GlobalRateLimiter { resume() { this.globalPaused = false; this.globalPauseUntil = 0; - console.log("Global rate limiter resumed"); + logger.debug("Global rate limiter resumed"); } /** @@ -290,6 +292,39 @@ class GlobalRateLimiter { } } + /** + * Update concurrency multiplier for parallel enrichment processing + * This allows power users to increase enrichment speed while respecting API rate limits + * @param multiplier 1-5, where 1 is conservative and 5 is maximum + */ + updateConcurrencyMultiplier(multiplier: number) { + const clampedMultiplier = Math.max(1, Math.min(5, multiplier)); + this.concurrencyMultiplier = clampedMultiplier; + + logger.debug(`[Rate Limiter] Updating concurrency multiplier to ${clampedMultiplier}`); + + // Update all service queues with new concurrency + for (const [service, config] of Object.entries(SERVICE_CONFIGS)) { + const queue = this.queues.get(service as ServiceName); + if (queue) { + // Scale concurrency by multiplier, but never exceed intervalCap (rate limit) + const newConcurrency = 
Math.min( + config.concurrency * clampedMultiplier, + config.intervalCap + ); + queue.concurrency = newConcurrency; + logger.debug(` → ${service}: ${config.concurrency} → ${newConcurrency}`); + } + } + } + + /** + * Get current concurrency multiplier + */ + getConcurrencyMultiplier(): number { + return this.concurrencyMultiplier; + } + private sleep(ms: number): Promise { return new Promise((resolve) => setTimeout(resolve, ms)); } @@ -300,4 +335,3 @@ export const rateLimiter = new GlobalRateLimiter(); // Export types for use in other services export type { ServiceName, RateLimitConfig }; - diff --git a/backend/src/services/rss-parser.ts b/backend/src/services/rss-parser.ts index 75bf9c3..2701d41 100644 --- a/backend/src/services/rss-parser.ts +++ b/backend/src/services/rss-parser.ts @@ -1,4 +1,5 @@ import Parser from "rss-parser"; +import { logger } from "../utils/logger"; interface RSSPodcast { title: string; @@ -36,18 +37,18 @@ class RSSParserService { this.parser = new Parser({ customFields: { feed: [ - ["itunes:author", "itunesAuthor"], - ["itunes:image", "itunesImage"], - ["itunes:explicit", "itunesExplicit"], - ["itunes:type", "itunesType"], + ["itunes:author", "itunesAuthor"] as any, + ["itunes:image", "itunesImage"] as any, + ["itunes:explicit", "itunesExplicit"] as any, + ["itunes:type", "itunesType"] as any, ], item: [ - ["itunes:author", "itunesAuthor"], - ["itunes:duration", "itunesDuration"], - ["itunes:image", "itunesImage"], - ["itunes:episode", "itunesEpisode"], - ["itunes:season", "itunesSeason"], - ["itunes:explicit", "itunesExplicit"], + ["itunes:author", "itunesAuthor"] as any, + ["itunes:duration", "itunesDuration"] as any, + ["itunes:image", "itunesImage"] as any, + ["itunes:episode", "itunesEpisode"] as any, + ["itunes:season", "itunesSeason"] as any, + ["itunes:explicit", "itunesExplicit"] as any, ], }, }); @@ -58,7 +59,7 @@ class RSSParserService { */ async parseFeed(feedUrl: string): Promise { try { - console.log(`\n [RSS PARSER] Fetching 
feed: ${feedUrl}`); + logger.debug(`\n [RSS PARSER] Fetching feed: ${feedUrl}`); const feed = await this.parser.parseURL(feedUrl); // Extract podcast metadata @@ -72,9 +73,9 @@ class RSSParserService { itunesId: this.extractItunesId(feed), }; - console.log(` Podcast: ${podcast.title}`); - console.log(` Author: ${podcast.author || "Unknown"}`); - console.log(` Episodes found: ${feed.items?.length || 0}`); + logger.debug(` Podcast: ${podcast.title}`); + logger.debug(` Author: ${podcast.author || "Unknown"}`); + logger.debug(` Episodes found: ${feed.items?.length || 0}`); // Extract episodes const episodes: RSSEpisode[] = (feed.items || []) @@ -83,7 +84,7 @@ class RSSParserService { // Find audio enclosure const audioEnclosure = this.findAudioEnclosure(item); if (!audioEnclosure) { - console.warn( + logger.warn( ` Skipping episode "${item.title}" - no audio found` ); return null; @@ -121,7 +122,7 @@ class RSSParserService { return episode; } catch (error: any) { - console.error( + logger.error( ` Error parsing episode "${item.title}":`, error.message ); @@ -130,11 +131,11 @@ class RSSParserService { }) .filter((ep): ep is RSSEpisode => ep !== null); - console.log(` Successfully parsed ${episodes.length} episodes`); + logger.debug(` Successfully parsed ${episodes.length} episodes`); return { podcast, episodes }; } catch (error: any) { - console.error( + logger.error( `\n [RSS PARSER] Failed to parse feed:`, error.message ); diff --git a/backend/src/services/search.ts b/backend/src/services/search.ts index 7fb96c0..a3645ce 100644 --- a/backend/src/services/search.ts +++ b/backend/src/services/search.ts @@ -1,4 +1,5 @@ import { prisma } from "../utils/db"; +import { logger } from "../utils/logger"; import { redisClient } from "../utils/redis"; interface SearchOptions { @@ -43,6 +44,31 @@ interface PodcastSearchResult { description: string | null; imageUrl: string | null; episodeCount: number; + rank?: number; +} + +interface EpisodeSearchResult { + id: string; + title: 
string; + description: string | null; + podcastId: string; + podcastTitle: string; + publishedAt: Date; + duration: number; + audioUrl: string; + rank: number; +} + +interface AudiobookSearchResult { + id: string; + title: string; + author: string | null; + narrator: string | null; + series: string | null; + description: string | null; + coverUrl: string | null; + duration: number | null; + rank: number; } export class SearchService { @@ -54,6 +80,7 @@ export class SearchService { private queryToTsquery(query: string): string { return query .trim() + .replace(/\s*&\s*/g, " and ") .split(/\s+/) .map((term) => `${term.replace(/[^\w]/g, "")}:*`) .join(" & "); @@ -77,9 +104,9 @@ export class SearchService { name, mbid, "heroUrl", - ts_rank(search_vector, to_tsquery('english', ${tsquery})) AS rank + ts_rank("searchVector", to_tsquery('english', ${tsquery})) AS rank FROM "Artist" - WHERE search_vector @@ to_tsquery('english', ${tsquery}) + WHERE "searchVector" @@ to_tsquery('english', ${tsquery}) ORDER BY rank DESC, name ASC LIMIT ${limit} OFFSET ${offset} @@ -87,7 +114,7 @@ export class SearchService { return results; } catch (error) { - console.error("Artist search error:", error); + logger.error("Artist search error:", error); // Fallback to LIKE query if full-text search fails const results = await prisma.artist.findMany({ where: { @@ -134,13 +161,13 @@ export class SearchService { a.year, a."coverUrl", GREATEST( - ts_rank(a.search_vector, to_tsquery('english', ${tsquery})), - ts_rank(ar.search_vector, to_tsquery('english', ${tsquery})) + ts_rank(a."searchVector", to_tsquery('english', ${tsquery})), + ts_rank(ar."searchVector", to_tsquery('english', ${tsquery})) ) AS rank FROM "Album" a LEFT JOIN "Artist" ar ON a."artistId" = ar.id - WHERE a.search_vector @@ to_tsquery('english', ${tsquery}) - OR ar.search_vector @@ to_tsquery('english', ${tsquery}) + WHERE a."searchVector" @@ to_tsquery('english', ${tsquery}) + OR ar."searchVector" @@ to_tsquery('english', 
${tsquery}) ORDER BY rank DESC, a.title ASC LIMIT ${limit} OFFSET ${offset} @@ -148,7 +175,7 @@ export class SearchService { return results; } catch (error) { - console.error("Album search error:", error); + logger.error("Album search error:", error); // Fallback to LIKE query - search both album title and artist name const results = await prisma.album.findMany({ where: { @@ -221,11 +248,11 @@ export class SearchService { a.title as "albumTitle", a."artistId", ar.name as "artistName", - ts_rank(t.search_vector, to_tsquery('english', ${tsquery})) AS rank + ts_rank(t."searchVector", to_tsquery('english', ${tsquery})) AS rank FROM "Track" t LEFT JOIN "Album" a ON t."albumId" = a.id LEFT JOIN "Artist" ar ON a."artistId" = ar.id - WHERE t.search_vector @@ to_tsquery('english', ${tsquery}) + WHERE t."searchVector" @@ to_tsquery('english', ${tsquery}) ORDER BY rank DESC, t.title ASC LIMIT ${limit} OFFSET ${offset} @@ -233,7 +260,7 @@ export class SearchService { return results; } catch (error) { - console.error("Track search error:", error); + logger.error("Track search error:", error); // Fallback to LIKE query const results = await prisma.track.findMany({ where: { @@ -279,6 +306,238 @@ export class SearchService { } } + /** + * Search podcasts using PostgreSQL full-text search + */ + async searchPodcastsFTS({ + query, + limit = 20, + offset = 0, + }: SearchOptions): Promise { + if (!query || query.trim().length === 0) { + return []; + } + + const tsquery = this.queryToTsquery(query); + + try { + const results = await prisma.$queryRaw` + SELECT + id, + title, + author, + description, + "imageUrl", + "episodeCount", + ts_rank("searchVector", to_tsquery('english', ${tsquery})) AS rank + FROM "Podcast" + WHERE "searchVector" @@ to_tsquery('english', ${tsquery}) + ORDER BY rank DESC, title ASC + LIMIT ${limit} + OFFSET ${offset} + `; + + return results; + } catch (error) { + logger.error("Podcast FTS search error:", error); + // Fallback to LIKE search + return 
this.searchPodcasts({ query, limit, offset }); + } + } + + /** + * Search podcast episodes using PostgreSQL full-text search + */ + async searchEpisodes({ + query, + limit = 20, + offset = 0, + }: SearchOptions): Promise { + if (!query || query.trim().length === 0) { + return []; + } + + const tsquery = this.queryToTsquery(query); + + try { + const results = await prisma.$queryRaw` + SELECT + e.id, + e.title, + e.description, + e."podcastId", + e."publishedAt", + e.duration, + e."audioUrl", + p.title as "podcastTitle", + ts_rank(e."searchVector", to_tsquery('english', ${tsquery})) AS rank + FROM "PodcastEpisode" e + LEFT JOIN "Podcast" p ON e."podcastId" = p.id + WHERE e."searchVector" @@ to_tsquery('english', ${tsquery}) + ORDER BY rank DESC, e."publishedAt" DESC + LIMIT ${limit} + OFFSET ${offset} + `; + + return results; + } catch (error) { + logger.error("Episode search error:", error); + // Fallback to LIKE search + const results = await prisma.podcastEpisode.findMany({ + where: { + OR: [ + { + title: { + contains: query, + mode: "insensitive", + }, + }, + { + description: { + contains: query, + mode: "insensitive", + }, + }, + ], + }, + select: { + id: true, + title: true, + description: true, + podcastId: true, + publishedAt: true, + duration: true, + audioUrl: true, + podcast: { + select: { + title: true, + }, + }, + }, + take: limit, + skip: offset, + orderBy: { + publishedAt: "desc", + }, + }); + + return results.map((r) => ({ + id: r.id, + title: r.title, + description: r.description, + podcastId: r.podcastId, + podcastTitle: r.podcast.title, + publishedAt: r.publishedAt, + duration: r.duration, + audioUrl: r.audioUrl, + rank: 0, + })); + } + } + + /** + * Search audiobooks using PostgreSQL full-text search + * Falls back to external API if local cache is empty + */ + async searchAudiobooksFTS({ + query, + limit = 20, + offset = 0, + }: SearchOptions): Promise { + if (!query || query.trim().length === 0) { + return []; + } + + const tsquery = 
this.queryToTsquery(query); + + try { + const results = await prisma.$queryRaw` + SELECT + id, + title, + author, + narrator, + series, + description, + "coverUrl", + duration, + ts_rank("searchVector", to_tsquery('english', ${tsquery})) AS rank + FROM "Audiobook" + WHERE "searchVector" @@ to_tsquery('english', ${tsquery}) + ORDER BY rank DESC, title ASC + LIMIT ${limit} + OFFSET ${offset} + `; + + // If we have results from cache, return them with transformed coverUrl + if (results.length > 0) { + return results.map((r) => ({ + ...r, + coverUrl: r.coverUrl ? `/audiobooks/${r.id}/cover` : null, + })); + } + + // If cache is empty, fall back to LIKE search on cached audiobooks + const likeResults = await prisma.audiobook.findMany({ + where: { + OR: [ + { + title: { + contains: query, + mode: "insensitive", + }, + }, + { + author: { + contains: query, + mode: "insensitive", + }, + }, + { + narrator: { + contains: query, + mode: "insensitive", + }, + }, + { + series: { + contains: query, + mode: "insensitive", + }, + }, + ], + }, + select: { + id: true, + title: true, + author: true, + narrator: true, + series: true, + description: true, + coverUrl: true, + duration: true, + }, + take: limit, + skip: offset, + orderBy: { + title: "asc", + }, + }); + + return likeResults.map((r) => ({ + ...r, + coverUrl: r.coverUrl ? 
`/audiobooks/${r.id}/cover` : null, + rank: 0, + })); + } catch (error) { + logger.error("Audiobook FTS search error:", error); + return []; + } + } + + /** + * Legacy LIKE-based podcast search (kept as fallback) + */ async searchPodcasts({ query, limit = 20, @@ -288,7 +547,7 @@ export class SearchService { return []; } - // Simple LIKE search for podcasts (no full-text search vector on podcasts yet) + // Simple LIKE search for podcasts (fallback) try { const results = await prisma.podcast.findMany({ where: { @@ -330,7 +589,7 @@ export class SearchService { return results; } catch (error) { - console.error("Podcast search error:", error); + logger.error("Podcast search error:", error); return []; } } @@ -342,6 +601,8 @@ export class SearchService { albums: [], tracks: [], podcasts: [], + audiobooks: [], + episodes: [], }; } @@ -350,31 +611,53 @@ export class SearchService { try { const cached = await redisClient.get(cacheKey); if (cached) { - console.log(`[SEARCH] Cache HIT for query: "${query}"`); - return JSON.parse(cached); + logger.debug(`[SEARCH] Cache HIT for query: "${query}"`); + const parsed = JSON.parse(cached); + // Transform cached audiobook coverUrls to ensure consistency + if (parsed.audiobooks && Array.isArray(parsed.audiobooks)) { + parsed.audiobooks = parsed.audiobooks.map( + (book: AudiobookSearchResult) => ({ + ...book, + coverUrl: book.coverUrl + ? 
`/audiobooks/${book.id}/cover` + : null, + }) + ); + } + return parsed; } } catch (err) { - console.warn("[SEARCH] Redis cache read error:", err); + logger.warn("[SEARCH] Redis cache read error:", err); } - console.log( + logger.debug( `[SEARCH] Cache MISS for query: "${query}" - fetching from database` ); - const [artists, albums, tracks, podcasts] = await Promise.all([ - this.searchArtists({ query, limit }), - this.searchAlbums({ query, limit }), - this.searchTracks({ query, limit }), - this.searchPodcasts({ query, limit }), - ]); + const [artists, albums, tracks, podcasts, audiobooks, episodes] = + await Promise.all([ + this.searchArtists({ query, limit }), + this.searchAlbums({ query, limit }), + this.searchTracks({ query, limit }), + this.searchPodcastsFTS({ query, limit }), + this.searchAudiobooksFTS({ query, limit }), + this.searchEpisodes({ query, limit }), + ]); - const results = { artists, albums, tracks, podcasts }; + const results = { + artists, + albums, + tracks, + podcasts, + audiobooks, + episodes, + }; // Cache for 1 hour (search results don't change often) try { await redisClient.setEx(cacheKey, 3600, JSON.stringify(results)); } catch (err) { - console.warn("[SEARCH] Redis cache write error:", err); + logger.warn("[SEARCH] Redis cache write error:", err); } return results; diff --git a/backend/src/services/simpleDownloadManager.ts b/backend/src/services/simpleDownloadManager.ts index d1361ed..e69fd4d 100644 --- a/backend/src/services/simpleDownloadManager.ts +++ b/backend/src/services/simpleDownloadManager.ts @@ -6,15 +6,24 @@ * No in-memory state - survives server restarts. 
*/ +import { logger } from "../utils/logger"; import { prisma } from "../utils/db"; -import { lidarrService, LidarrRelease } from "./lidarr"; +import { Prisma, PrismaClient } from "@prisma/client"; +import { lidarrService, LidarrRelease, AcquisitionError, AcquisitionErrorType } from "./lidarr"; import { musicBrainzService } from "./musicbrainz"; import { getSystemSettings } from "../utils/systemSettings"; import { notificationService } from "./notificationService"; +import { notificationPolicyService } from "./notificationPolicyService"; import { sessionLog } from "../utils/playlistLogger"; import axios from "axios"; import * as crypto from "crypto"; +// Type for transactional prisma client +type TransactionClient = Omit< + PrismaClient, + "$connect" | "$disconnect" | "$on" | "$transaction" | "$use" | "$extends" +>; + // Generate a UUID v4 without external dependency function generateCorrelationId(): string { return crypto.randomUUID(); @@ -22,9 +31,10 @@ function generateCorrelationId(): string { class SimpleDownloadManager { private readonly DEFAULT_MAX_ATTEMPTS = 3; - // Increased timeouts for batch processing (Discovery requests 30+ albums at once) - private readonly IMPORT_TIMEOUT_MS = 30 * 60 * 1000; // 30 minutes (large batches need more time) - private readonly PENDING_TIMEOUT_MS = 30 * 60 * 1000; // 30 minutes for pending (batch queuing) + // Reduced timeouts for faster failure detection + private readonly IMPORT_TIMEOUT_MS = 15 * 60 * 1000; // 15 minutes for imports + private readonly PENDING_TIMEOUT_MS = 10 * 60 * 1000; // 10 minutes for pending + private readonly NO_SOURCE_TIMEOUT_MS = 5 * 60 * 1000; // 5 minutes for no sources found /** * Get max retry attempts from user's discover config, fallback to default @@ -40,6 +50,48 @@ class SimpleDownloadManager { } } + /** + * Transaction wrapper with retry logic for serialization conflicts + */ + private async withTransaction( + operation: (tx: TransactionClient) => Promise, + options?: { maxRetries?: 
number; logPrefix?: string } + ): Promise { + const maxRetries = options?.maxRetries ?? 3; + const logPrefix = options?.logPrefix ?? "[TX]"; + let lastError: Error | undefined; + + for (let attempt = 1; attempt <= maxRetries; attempt++) { + try { + return await prisma.$transaction(operation, { + isolationLevel: + Prisma.TransactionIsolationLevel.Serializable, + maxWait: 5000, + timeout: 10000, + }); + } catch (error: any) { + // Check for serialization failure + const isSerializationError = + error.code === "P2034" || + error.message?.includes("could not serialize") || + error.message?.includes("deadlock"); + + if (isSerializationError && attempt < maxRetries) { + lastError = error; + const delay = Math.pow(2, attempt) * 100; // 200ms, 400ms, 800ms + logger.debug( + `${logPrefix} Serialization conflict, retry ${attempt}/${maxRetries} after ${delay}ms` + ); + await new Promise((resolve) => setTimeout(resolve, delay)); + continue; + } + throw error; + } + } + + throw lastError; + } + /** * Start a new download * Returns the correlation ID for webhook matching @@ -52,10 +104,14 @@ class SimpleDownloadManager { albumMbid: string, userId: string, isDiscovery: boolean = false - ): Promise<{ success: boolean; correlationId?: string; error?: string }> { - console.log(`\n Starting download: ${artistName} - ${albumTitle}${isDiscovery ? " (discovery)" : ""}`); - console.log(` Job ID: ${jobId}`); - console.log(` Album MBID: ${albumMbid}`); + ): Promise<{ success: boolean; correlationId?: string; error?: string; errorType?: AcquisitionErrorType; isRecoverable?: boolean }> { + logger.debug( + `\n Starting download: ${artistName} - ${albumTitle}${ + isDiscovery ? 
" (discovery)" : "" + }` + ); + logger.debug(` Job ID: ${jobId}`); + logger.debug(` Album MBID: ${albumMbid}`); // Generate correlation ID for webhook matching const correlationId = generateCorrelationId(); @@ -64,21 +120,21 @@ class SimpleDownloadManager { // Fetch artist MBID from MusicBrainz using the album MBID let artistMbid: string | undefined; try { - console.log(` Fetching artist MBID from MusicBrainz...`); + logger.debug(` Fetching artist MBID from MusicBrainz...`); const releaseGroup = await musicBrainzService.getReleaseGroup( albumMbid ); if (releaseGroup?.["artist-credit"]?.[0]?.artist?.id) { artistMbid = releaseGroup["artist-credit"][0].artist.id; - console.log(` Found artist MBID: ${artistMbid}`); + logger.debug(` Found artist MBID: ${artistMbid}`); } else { - console.warn( + logger.warn( ` Could not extract artist MBID from release group` ); } } catch (mbError) { - console.error( + logger.error( ` Failed to fetch artist MBID from MusicBrainz:`, mbError ); @@ -100,12 +156,12 @@ class SimpleDownloadManager { ); } - console.log(` Album queued in Lidarr (ID: ${result.id})`); + logger.debug(` Album queued in Lidarr (ID: ${result.id})`); // Lidarr may have matched by name and returned a different MBID const actualLidarrMbid = result.foreignAlbumId; if (actualLidarrMbid && actualLidarrMbid !== albumMbid) { - console.log( + logger.debug( ` MBID mismatch - original: ${albumMbid}, Lidarr: ${actualLidarrMbid}` ); } @@ -118,7 +174,10 @@ class SimpleDownloadManager { select: { metadata: true }, }); const existingMetadata = (existingJob?.metadata as any) || {}; - + // Initialize status tracking for Lidarr download + const lidarrAttempts = (existingMetadata.lidarrAttempts || 0) + 1; + const statusText = `Lidarr #${lidarrAttempts}`; + await prisma.downloadJob.update({ where: { id: jobId }, data: { @@ -135,45 +194,120 @@ class SimpleDownloadManager { artistMbid, albumMbid, // Original requested MBID lidarrMbid: actualLidarrMbid, // Actual Lidarr MBID (may differ) - 
downloadType: existingMetadata.downloadType || "library", + downloadType: + existingMetadata.downloadType || "library", startedAt: now.toISOString(), // Backup in metadata for timeout tracking + currentSource: "lidarr" as const, + lidarrAttempts, + statusText, }, }, }); - console.log( + logger.debug( ` Download started with correlation ID: ${correlationId}` ); return { success: true, correlationId }; } catch (error: any) { - console.error(` Failed to start download:`, error.message); + logger.error(` Failed to start download:`, error.message); + + // Extract error properties if this is an AcquisitionError + const errorType = error instanceof AcquisitionError ? error.type : undefined; + const isRecoverable = error instanceof AcquisitionError ? error.isRecoverable : undefined; // Get the job to check if it's a discovery job const job = await prisma.downloadJob.findUnique({ where: { id: jobId }, }); + const existingMetadata = (job?.metadata as any) || {}; + + // Handle "No releases available" error - immediate failure + if (error.message?.includes("No releases available")) { + logger.debug(` No sources found - handling immediate failure`); + + // For discovery jobs, skip same-artist fallback + if (job?.discoveryBatchId) { + logger.debug( + ` Discovery job - skipping same-artist fallback (diversity enforced)` + ); + } else if (job && !job.discoveryBatchId) { + // For library downloads, try same-artist fallback + logger.debug( + ` Library download - trying same-artist fallback...` + ); + + const artistMbid = + job.artistMbid || existingMetadata.artistMbid; + + if (artistMbid) { + const fallbackResult = + await this.tryNextAlbumFromArtist( + { ...job, metadata: existingMetadata }, + "No sources available" + ); + + if (fallbackResult.retried && fallbackResult.jobId) { + return { success: true }; + } + } + } + + // Mark as failed with proper status text + await prisma.downloadJob.update({ + where: { id: jobId }, + data: { + correlationId, + status: "failed", + error: 
error.message, + completedAt: new Date(), + metadata: { + ...existingMetadata, + statusText: "No sources available", + failedAt: new Date().toISOString(), + }, + }, + }); + + // Check batch completion for discovery jobs + if (job?.discoveryBatchId) { + const { discoverWeeklyService } = await import( + "./discoverWeekly" + ); + await discoverWeeklyService.checkBatchCompletion( + job.discoveryBatchId + ); + } + + return { success: false, error: error.message, errorType, isRecoverable }; + } // If album wasn't found, try same-artist fallback ONLY for non-discovery jobs // Discovery jobs should find NEW artists via the discovery system instead if (job && error.message?.includes("album not found")) { if (job.discoveryBatchId) { - console.log(` Album not found - Discovery job, skipping same-artist fallback`); - console.log(` Discovery system will find a different artist instead`); + logger.debug( + ` Album not found - Discovery job, skipping same-artist fallback` + ); + logger.debug( + ` Discovery system will find a different artist instead` + ); } else { - console.log(` Album not found - trying same-artist fallback...`); + logger.debug( + ` Album not found - trying same-artist fallback...` + ); - // Use the new tryNextAlbumFromArtist approach instead of findReplacementAlbum - const metadata = (job.metadata as any) || {}; - const artistMbid = job.artistMbid || metadata.artistMbid; + const artistMbid = + job.artistMbid || existingMetadata.artistMbid; if (artistMbid) { - const fallbackResult = await this.tryNextAlbumFromArtist( - { ...job, metadata }, - "Album not found in Lidarr" - ); + const fallbackResult = + await this.tryNextAlbumFromArtist( + { ...job, metadata: existingMetadata }, + "Album not found in Lidarr" + ); if (fallbackResult.retried && fallbackResult.jobId) { - return { success: true, replacedWith: fallbackResult.jobId }; + return { success: true }; } } } @@ -187,23 +321,32 @@ class SimpleDownloadManager { status: "failed", error: error.message || "Failed 
to add album to Lidarr", completedAt: new Date(), + metadata: { + ...existingMetadata, + statusText: "Failed to start", + failedAt: new Date().toISOString(), + }, }, }); // Check batch completion for discovery jobs if (job?.discoveryBatchId) { - const { discoverWeeklyService } = await import("./discoverWeekly"); - await discoverWeeklyService.checkBatchCompletion(job.discoveryBatchId); + const { discoverWeeklyService } = await import( + "./discoverWeekly" + ); + await discoverWeeklyService.checkBatchCompletion( + job.discoveryBatchId + ); } - return { success: false, error: error.message }; + return { success: false, error: error.message, errorType, isRecoverable }; } } /** * Handle download grabbed event (from webhook) * Links the Lidarr downloadId to our job - * + * * IMPORTANT: One logical album = one job, regardless of MBID. * MBIDs can differ between MusicBrainz and Lidarr, but artist+album name is canonical. */ @@ -214,251 +357,237 @@ class SimpleDownloadManager { artistName: string, lidarrAlbumId: number ): Promise<{ matched: boolean; jobId?: string }> { - console.log(`[DOWNLOAD] Grabbed: ${artistName} - ${albumTitle}`); - console.log(` Download ID: ${downloadId}`); - console.log(` Album MBID: ${albumMbid}`); - console.log(` Lidarr Album ID: ${lidarrAlbumId}`); + logger.debug(`[DOWNLOAD] Grabbed: ${artistName} - ${albumTitle}`); + logger.debug(` Download ID: ${downloadId}`); - // Get ALL active jobs (pending + processing) for matching - // Include pending because job might not have transitioned to processing yet - const activeJobs = await prisma.downloadJob.findMany({ - where: { - status: { in: ["pending", "processing"] }, - }, - }); + return await this.withTransaction( + async (tx) => { + // ═══════════════════════════════════════════════════════════════ + // STEP 1: Idempotency Check - Already processed? 
+ // ═══════════════════════════════════════════════════════════════ + const existingByRef = await tx.downloadJob.findFirst({ + where: { + metadata: { + path: ["downloadId"], + equals: downloadId, + }, + }, + }); - console.log( - ` Found ${activeJobs.length} active job(s) to match against` - ); - - let job: (typeof activeJobs)[0] | undefined; - - // Normalize artist/album for name-based matching - const normalizedArtist = artistName?.toLowerCase().trim() || ""; - const normalizedAlbum = albumTitle?.toLowerCase().trim() || ""; - - // Strategy 1: Match by targetMbid (exact MBID match) - job = activeJobs.find( - (j) => j.targetMbid === albumMbid && !j.lidarrRef - ); - if (job) { - console.log(` Matched by targetMbid`); - } - - // Strategy 2: Match by lidarrMbid in metadata - if (!job) { - job = activeJobs.find((j) => { - const metadata = j.metadata as any; - return metadata?.lidarrMbid === albumMbid && !j.lidarrRef; - }); - if (job) { - console.log(` Matched by lidarrMbid`); - } - } - - // Strategy 3: Match by lidarrAlbumId (stored when download started) - if (!job && lidarrAlbumId > 0) { - job = activeJobs.find((j) => { - const metadata = j.metadata as any; - return ( - (j as any).lidarrAlbumId === lidarrAlbumId || - metadata?.lidarrAlbumId === lidarrAlbumId - ); - }); - if (job) { - console.log(` Matched by lidarrAlbumId`); - } - } - - // Strategy 4: Match by artist + album title in metadata (CANONICAL - most important) - // This handles MBID mismatches between MusicBrainz and Lidarr - if (!job && normalizedArtist && normalizedAlbum) { - job = activeJobs.find((j) => { - if (j.lidarrRef) return false; // Already linked to a different download - const metadata = j.metadata as any; - const candidateArtist = metadata?.artistName?.toLowerCase().trim() || ""; - const candidateAlbum = metadata?.albumTitle?.toLowerCase().trim() || ""; - return ( - candidateArtist === normalizedArtist && - candidateAlbum === normalizedAlbum - ); - }); - if (job) { - console.log(` Matched by 
artist/album title in metadata`); - } - } - - // Strategy 5: Match by subject field (format: "Artist - Album") - if (!job && normalizedArtist && normalizedAlbum) { - job = activeJobs.find((j) => { - if (j.lidarrRef) return false; // Already linked - const subject = j.subject?.toLowerCase().trim() || ""; - // Check if subject contains BOTH artist AND album (more precise) - return ( - subject.includes(normalizedArtist) && - subject.includes(normalizedAlbum) - ); - }); - if (job) { - console.log(` Matched by subject field`); - } - } - - // Strategy 6: For retries - update job that already has a different lidarrRef - if (!job && lidarrAlbumId > 0) { - job = activeJobs.find((j) => { - const metadata = j.metadata as any; - return ( - ((j as any).lidarrAlbumId === lidarrAlbumId || - metadata?.lidarrAlbumId === lidarrAlbumId) && - j.lidarrRef !== null - ); - }); - if (job) { - console.log(` Matched retry by lidarrAlbumId (updating lidarrRef)`); - } - } - - if (!job) { - // Before creating a new job, do one final check: search ALL active jobs by name - // This catches jobs that might have been created with different casing or formatting - console.log(` No match in active jobs with first pass - doing thorough name search...`); - - // Search all active jobs (including ones we might have filtered out) - for (const j of activeJobs) { - if (j.lidarrRef) continue; // Already linked - - const metadata = j.metadata as any; - const candidateArtist = metadata?.artistName?.toLowerCase().trim() || ""; - const candidateAlbum = metadata?.albumTitle?.toLowerCase().trim() || ""; - const subject = j.subject?.toLowerCase().trim() || ""; - - // More lenient matching - check metadata OR subject - const artistMatches = - candidateArtist === normalizedArtist || - (normalizedArtist && subject.includes(normalizedArtist)); - const albumMatches = - candidateAlbum === normalizedAlbum || - (normalizedAlbum && subject.includes(normalizedAlbum)); - - if (artistMatches && albumMatches) { - console.log(` 
Found existing job by thorough name search: ${j.id}`); - job = j; - break; + if (existingByRef) { + logger.debug( + ` Already tracked by job: ${existingByRef.id}` + ); + return { matched: true, jobId: existingByRef.id }; } - } - } - if (!job) { - // Still no match - this is truly an external download or timing issue - // Create a tracking job, but first check we're not creating a duplicate - console.log(` No matching job found - checking for duplicates before creating tracking job`); + // ═══════════════════════════════════════════════════════════════ + // STEP 2: Query Unassigned Jobs (Transaction ensures consistent view) + // Only get jobs not yet assigned to a download (lidarrRef IS NULL) + // ═══════════════════════════════════════════════════════════════ + const activeJobs = await tx.downloadJob.findMany({ + where: { + status: { in: ["pending", "processing"] }, + lidarrRef: null, // Not yet assigned to a download + }, + }); - // Check if there's already a tracking job for this exact download - const existingTrackingJob = await prisma.downloadJob.findFirst({ - where: { - lidarrRef: downloadId, - }, - }); + logger.debug( + ` Found ${activeJobs.length} unassigned active job(s)` + ); - if (existingTrackingJob) { - console.log(` Tracking job already exists: ${existingTrackingJob.id}`); - return { matched: true, jobId: existingTrackingJob.id }; - } + // Normalize for matching + const normalizedArtist = artistName?.toLowerCase().trim() || ""; + const normalizedAlbum = albumTitle?.toLowerCase().trim() || ""; - // Check if there's a job for this artist+album that we somehow missed - const duplicateCheck = await prisma.downloadJob.findFirst({ - where: { - status: { in: ["pending", "processing"] }, - OR: [ - { targetMbid: albumMbid }, - { lidarrAlbumId: lidarrAlbumId > 0 ? 
lidarrAlbumId : undefined }, - ], - }, - }); + // ═══════════════════════════════════════════════════════════════ + // STEP 3: Apply Matching Strategies (In Priority Order) + // ═══════════════════════════════════════════════════════════════ + let matchedJob: (typeof activeJobs)[0] | undefined; + let matchStrategy = ""; - if (duplicateCheck) { - console.log(` Found job by MBID/lidarrAlbumId: ${duplicateCheck.id} - linking instead of creating new`); - job = duplicateCheck; - } - } + // Strategy 1: targetMbid + matchedJob = activeJobs.find((j) => j.targetMbid === albumMbid); + if (matchedJob) matchStrategy = "targetMbid"; - if (!job) { - // Truly no existing job - create tracking job for retry support - console.log(` Creating tracking job for untracked download`); + // Strategy 2: lidarrMbid in metadata + if (!matchedJob) { + matchedJob = activeJobs.find((j) => { + const meta = j.metadata as any; + return meta?.lidarrMbid === albumMbid; + }); + if (matchedJob) matchStrategy = "lidarrMbid"; + } - try { - // Find the user from a recent artist download request - const recentJob = await prisma.downloadJob.findFirst({ + // Strategy 3: lidarrAlbumId + if (!matchedJob && lidarrAlbumId > 0) { + matchedJob = activeJobs.find((j) => { + const meta = j.metadata as any; + return ( + (j as any).lidarrAlbumId === lidarrAlbumId || + meta?.lidarrAlbumId === lidarrAlbumId + ); + }); + if (matchedJob) matchStrategy = "lidarrAlbumId"; + } + + // Strategy 4: Artist + Album name (canonical match) + if (!matchedJob && normalizedArtist && normalizedAlbum) { + matchedJob = activeJobs.find((j) => { + const meta = j.metadata as any; + const candArtist = + meta?.artistName?.toLowerCase().trim() || ""; + const candAlbum = + meta?.albumTitle?.toLowerCase().trim() || ""; + return ( + candArtist === normalizedArtist && + candAlbum === normalizedAlbum + ); + }); + if (matchedJob) matchStrategy = "artist+album"; + } + + // Strategy 5: Subject field + if (!matchedJob && normalizedArtist && 
normalizedAlbum) { + matchedJob = activeJobs.find((j) => { + const subject = j.subject?.toLowerCase().trim() || ""; + return ( + subject.includes(normalizedArtist) && + subject.includes(normalizedAlbum) + ); + }); + if (matchedJob) matchStrategy = "subject"; + } + + // ═══════════════════════════════════════════════════════════════ + // STEP 4: Update Matched Job OR Create Tracking Job (Atomic) + // ═══════════════════════════════════════════════════════════════ + if (matchedJob) { + logger.debug( + ` Matched by ${matchStrategy}: ${matchedJob.id}` + ); + + await tx.downloadJob.update({ + where: { id: matchedJob.id }, + data: { + status: "processing", + lidarrRef: downloadId, + lidarrAlbumId, + targetMbid: matchedJob.targetMbid || albumMbid, + metadata: { + ...((matchedJob.metadata as any) || {}), + downloadId, + lidarrMbid: albumMbid, + grabbedAt: new Date().toISOString(), + }, + }, + }); + + return { matched: true, jobId: matchedJob.id }; + } + + // No match - check for duplicates before creating tracking job + logger.debug(` No match found, checking for duplicates...`); + + // ═══════════════════════════════════════════════════════════════ + // DUPLICATE DETECTION: Prevent creating duplicate tracking jobs + // This prevents the "Beatles Abbey Road" issue where the same + // album is downloaded twice by SABnzbd, causing file deletions. 
+ // ═══════════════════════════════════════════════════════════════ + + // Normalize for duplicate detection + const normalizedArtistForDup = artistName?.toLowerCase().trim() || ""; + const normalizedAlbumForDup = albumTitle?.toLowerCase().trim() || ""; + + // Check by MBID first (most reliable) + let existingJob = null; + if (albumMbid) { + existingJob = await tx.downloadJob.findFirst({ + where: { + targetMbid: albumMbid, + status: { in: ["pending", "processing", "completed"] }, + }, + }); + } + + // If no MBID match, check by artist+album name + if (!existingJob && normalizedArtistForDup && normalizedAlbumForDup) { + const candidateJobs = await tx.downloadJob.findMany({ + where: { + status: { in: ["pending", "processing", "completed"] }, + }, + }); + + existingJob = candidateJobs.find((j) => { + const meta = j.metadata as any; + const candArtist = meta?.artistName?.toLowerCase().trim() || ""; + const candAlbum = meta?.albumTitle?.toLowerCase().trim() || ""; + return ( + candArtist === normalizedArtistForDup && + candAlbum === normalizedAlbumForDup + ); + }); + } + + // If duplicate found, log warning and exit early + if (existingJob) { + logger.warn(`[DownloadManager] Duplicate download detected`, { + artist: artistName, + album: albumTitle, + mbid: albumMbid, + existingJobId: existingJob.id, + }); + return { matched: false }; + } + + logger.debug(` No duplicates found, creating tracking job`); + + // Find user from recent artist download + const recentJob = await tx.downloadJob.findFirst({ where: { type: "artist", status: { in: ["pending", "processing", "completed"] }, - metadata: { - path: ["artistName"], - string_contains: artistName, - }, }, orderBy: { createdAt: "desc" }, }); - const userId = recentJob?.userId; - - if (userId) { - const newJob = await prisma.downloadJob.create({ - data: { - userId, - subject: `${artistName} - ${albumTitle}`, - type: "album", - targetMbid: albumMbid, - status: "processing", - lidarrRef: downloadId, - lidarrAlbumId, - 
attempts: 1, - metadata: { - artistName, - albumTitle, - downloadId, - grabbedAt: new Date().toISOString(), - source: "lidarr-auto-grab", - }, - }, - }); - console.log(` Created tracking job: ${newJob.id}`); - return { matched: true, jobId: newJob.id }; - } else { - console.log(` Could not determine user, skipping job creation`); + if (!recentJob?.userId) { + logger.debug( + ` Cannot determine user, skipping job creation` + ); return { matched: false }; } - } catch (error: any) { - console.log(` Failed to create tracking job: ${error.message}`); - return { matched: false }; - } - } - // Update job with Lidarr reference and ensure status is processing - await prisma.downloadJob.update({ - where: { id: job.id }, - data: { - status: "processing", // Ensure status is processing (might have been pending) - lidarrRef: downloadId, - lidarrAlbumId, - targetMbid: job.targetMbid || albumMbid, // Keep original or use Lidarr's - metadata: { - ...((job.metadata as any) || {}), - downloadId, - lidarrMbid: albumMbid, // Store Lidarr's MBID for future matching - grabbedAt: new Date().toISOString(), - }, + const trackingJob = await tx.downloadJob.create({ + data: { + userId: recentJob.userId, + subject: `${artistName} - ${albumTitle}`, + type: "album", + targetMbid: albumMbid, + status: "processing", + lidarrRef: downloadId, + lidarrAlbumId, + attempts: 1, + metadata: { + artistName, + albumTitle, + downloadId, + grabbedAt: new Date().toISOString(), + source: "lidarr-auto-grab", + }, + }, + }); + + logger.debug(` Created tracking job: ${trackingJob.id}`); + return { matched: true, jobId: trackingJob.id }; }, - }); - - console.log(` Linked to job: ${job.id}`); - return { matched: true, jobId: job.id }; + { logPrefix: "[GRAB-TX]" } + ); } /** * Handle download complete event (from webhook) - * + * * IMPORTANT: One logical album = one job. Match by name if MBID doesn't match. 
*/ async onDownloadComplete( @@ -467,238 +596,243 @@ class SimpleDownloadManager { artistName?: string, albumTitle?: string, lidarrAlbumId?: number - ): Promise<{ jobId?: string; batchId?: string; downloadBatchId?: string }> { - console.log(`\n[COMPLETE] Download completed: ${downloadId}`); - if (albumMbid) console.log(` Album MBID: ${albumMbid}`); - if (lidarrAlbumId) console.log(` Lidarr Album ID: ${lidarrAlbumId}`); - if (artistName && albumTitle) - console.log(` Album: ${artistName} - ${albumTitle}`); + ): Promise<{ + jobId?: string; + batchId?: string; + downloadBatchId?: string; + spotifyImportJobId?: string; + }> { + logger.debug(`\n[COMPLETE] Download completed: ${downloadId}`); - // Get ALL active jobs (pending + processing) for matching - const activeJobs = await prisma.downloadJob.findMany({ - where: { status: { in: ["pending", "processing"] } }, - }); - - console.log( - ` Found ${activeJobs.length} active job(s) to match against` - ); - - // Normalize for name matching - const normalizedArtist = artistName?.toLowerCase().trim() || ""; - const normalizedAlbum = albumTitle?.toLowerCase().trim() || ""; - - let job: (typeof activeJobs)[0] | undefined; - let matchedJobs: (typeof activeJobs) = []; - - // Strategy 1: Find job by lidarrRef (most reliable) - job = activeJobs.find((j) => j.lidarrRef === downloadId); - if (job) console.log(` Matched by lidarrRef`); - - // Strategy 2: Find job by lidarrAlbumId - if (!job && lidarrAlbumId) { - job = activeJobs.find((j) => j.lidarrAlbumId === lidarrAlbumId); - if (job) console.log(` Matched by lidarrAlbumId`); - } - - // Strategy 3: Match by previousDownloadIds (for retried downloads) - if (!job) { - job = activeJobs.find((j) => { - const metadata = j.metadata as any; - const prevIds = metadata?.previousDownloadIds as string[] | undefined; - return prevIds?.includes(downloadId); - }); - if (job) console.log(` Matched by previousDownloadIds`); - } - - // Strategy 4: Match by MBID (targetMbid or lidarrMbid in metadata) 
- if (!job && albumMbid) { - job = activeJobs.find((j) => j.targetMbid === albumMbid); - if (job) { - console.log(` Matched by targetMbid`); - } else { - job = activeJobs.find((j) => { - const metadata = j.metadata as any; - return metadata?.lidarrMbid === albumMbid; + const result = await this.withTransaction( + async (tx) => { + // ═══════════════════════════════════════════════════════════════ + // STEP 1: Check if already completed (idempotency) + // ═══════════════════════════════════════════════════════════════ + const completedJob = await tx.downloadJob.findFirst({ + where: { + metadata: { + path: ["downloadId"], + equals: downloadId, + }, + status: "completed", + }, }); - if (job) console.log(` Matched by lidarrMbid in metadata`); - } - } - // Strategy 5: Match by artist+album name (CANONICAL - handles MBID mismatches) - if (!job && normalizedArtist && normalizedAlbum) { - // Find ALL jobs matching this artist+album (we'll dedupe after) - matchedJobs = activeJobs.filter((j) => { - const metadata = j.metadata as any; - const candidateArtist = metadata?.artistName?.toLowerCase().trim() || ""; - const candidateAlbum = metadata?.albumTitle?.toLowerCase().trim() || ""; - const subject = j.subject?.toLowerCase().trim() || ""; - - // Match by metadata or subject - const metaMatch = candidateArtist === normalizedArtist && candidateAlbum === normalizedAlbum; - const subjectMatch = subject.includes(normalizedArtist) && subject.includes(normalizedAlbum); - - return metaMatch || subjectMatch; - }); + if (completedJob) { + logger.debug(` Already completed: ${completedJob.id}`); + const meta = completedJob.metadata as any; + return { + jobId: completedJob.id, + batchId: completedJob.discoveryBatchId || undefined, + downloadBatchId: meta?.batchId, + spotifyImportJobId: meta?.spotifyImportJobId, + }; + } - if (matchedJobs.length > 0) { - // Pick the first one (oldest), will clean up duplicates below - job = matchedJobs[0]; - console.log(` Matched by artist/album name 
(found ${matchedJobs.length} matching job(s))`); - } - } + // ═══════════════════════════════════════════════════════════════ + // STEP 2: Find Active Job + // ═══════════════════════════════════════════════════════════════ + const activeJobs = await tx.downloadJob.findMany({ + where: { status: { in: ["pending", "processing"] } }, + }); - // Strategy 6: Match by subject containing artist (last resort) - if (!job && normalizedArtist) { - job = activeJobs.find((j) => { - const subject = j.subject?.toLowerCase().trim() || ""; - return subject.includes(normalizedArtist); - }); - if (job) console.log(` Matched by subject containing artist`); - } + const normalizedArtist = artistName?.toLowerCase().trim() || ""; + const normalizedAlbum = albumTitle?.toLowerCase().trim() || ""; - if (!job) { - console.log(` No matching job found for downloadId: ${downloadId}`); - return {}; - } + let job: (typeof activeJobs)[0] | undefined; - // Clean up duplicate jobs for the same artist+album - // Mark extras as completed too (they're the same logical download) - // Always search for duplicates, regardless of how we found the primary job - const jobMeta = job.metadata as any; - const jobArtist = jobMeta?.artistName?.toLowerCase().trim() || ""; - const jobAlbum = jobMeta?.albumTitle?.toLowerCase().trim() || ""; - const jobSubject = job.subject?.toLowerCase().trim() || ""; - - const duplicateJobs = activeJobs.filter((j) => { - if (j.id === job.id) return false; // Skip the matched job - - const meta = j.metadata as any; - const candArtist = meta?.artistName?.toLowerCase().trim() || ""; - const candAlbum = meta?.albumTitle?.toLowerCase().trim() || ""; - const candSubject = j.subject?.toLowerCase().trim() || ""; - - // Match by metadata - if (jobArtist && jobAlbum && candArtist === jobArtist && candAlbum === jobAlbum) { - return true; - } - - // Match by subject - if (jobSubject && candSubject === jobSubject) { - return true; - } - - // Match if subjects contain both artist and album - if 
(jobArtist && jobAlbum && candSubject.includes(jobArtist) && candSubject.includes(jobAlbum)) { - return true; - } - - return false; - }); - - if (duplicateJobs.length > 0) { - console.log(` Found ${duplicateJobs.length} duplicate job(s) for same album - marking as completed`); - const duplicateIds = duplicateJobs.map(j => j.id); - await prisma.downloadJob.updateMany({ - where: { id: { in: duplicateIds } }, - data: { - status: "completed", - completedAt: new Date(), - error: null, - }, - }); - } + // Strategy 1: lidarrRef + job = activeJobs.find((j) => j.lidarrRef === downloadId); + if (job) logger.debug(` Matched by lidarrRef`); - // Mark job as completed (clear any previous error messages) - await prisma.downloadJob.update({ - where: { id: job.id }, - data: { - status: "completed", - completedAt: new Date(), - error: null, // Clear any timeout errors since download succeeded - metadata: { - ...((job.metadata as any) || {}), - completedAt: new Date().toISOString(), - }, - }, - }); + // Strategy 2: lidarrAlbumId + if (!job && lidarrAlbumId) { + job = activeJobs.find( + (j) => j.lidarrAlbumId === lidarrAlbumId + ); + if (job) logger.debug(` Matched by lidarrAlbumId`); + } - console.log(` Job ${job.id} marked complete`); + // Strategy 3: previousDownloadIds + if (!job) { + job = activeJobs.find((j) => { + const meta = j.metadata as any; + return meta?.previousDownloadIds?.includes(downloadId); + }); + if (job) logger.debug(` Matched by previousDownloadIds`); + } - // Send notification for completed download (skip for discovery/import batches) - // Also skip if notification was already sent (dedup for same artist+album) - const meta = job.metadata as any; - const isDiscovery = meta?.downloadType === "discovery"; - const isSpotifyImport = !!meta?.spotifyImportJobId; - const notificationAlreadySent = meta?.notificationSent === true; - - if (!isDiscovery && !isSpotifyImport && !notificationAlreadySent) { - try { - await notificationService.notifyDownloadComplete( - 
job.userId, - job.subject, - undefined, - meta?.artistId - ); - - // Mark notification as sent to prevent duplicates - await prisma.downloadJob.update({ + // Strategy 4: MBID + if (!job && albumMbid) { + job = activeJobs.find((j) => j.targetMbid === albumMbid); + if (!job) { + job = activeJobs.find( + (j) => (j.metadata as any)?.lidarrMbid === albumMbid + ); + } + if (job) logger.debug(` Matched by MBID`); + } + + // Strategy 5: Name match + if (!job && normalizedArtist && normalizedAlbum) { + job = activeJobs.find((j) => { + const meta = j.metadata as any; + const candArtist = + meta?.artistName?.toLowerCase().trim() || ""; + const candAlbum = + meta?.albumTitle?.toLowerCase().trim() || ""; + const subject = j.subject?.toLowerCase().trim() || ""; + + return ( + (candArtist === normalizedArtist && + candAlbum === normalizedAlbum) || + (subject.includes(normalizedArtist) && + subject.includes(normalizedAlbum)) + ); + }); + if (job) logger.debug(` Matched by name`); + } + + if (!job) { + logger.debug(` No matching job found`); + return {}; + } + + // ═══════════════════════════════════════════════════════════════ + // STEP 3: Find and Mark Duplicates Complete (Atomic) + // ═══════════════════════════════════════════════════════════════ + const jobMeta = job.metadata as any; + const jobArtist = + jobMeta?.artistName?.toLowerCase().trim() || ""; + const jobAlbum = + jobMeta?.albumTitle?.toLowerCase().trim() || ""; + + const duplicateJobs = activeJobs.filter((j) => { + if (j.id === job!.id) return false; + const meta = j.metadata as any; + const candArtist = + meta?.artistName?.toLowerCase().trim() || ""; + const candAlbum = + meta?.albumTitle?.toLowerCase().trim() || ""; + return candArtist === jobArtist && candAlbum === jobAlbum; + }); + + if (duplicateJobs.length > 0) { + logger.debug( + ` Marking ${duplicateJobs.length} duplicate(s) complete` + ); + await tx.downloadJob.updateMany({ + where: { id: { in: duplicateJobs.map((j) => j.id) } }, + data: { + status: 
"completed", + completedAt: new Date(), + error: null, + }, + }); + } + + // ═══════════════════════════════════════════════════════════════ + // STEP 4: Mark Primary Job Complete + // ═══════════════════════════════════════════════════════════════ + await tx.downloadJob.update({ where: { id: job.id }, data: { + status: "completed", + completedAt: new Date(), + error: null, metadata: { - ...meta, - notificationSent: true, + ...jobMeta, + completedAt: new Date().toISOString(), }, }, }); + + logger.debug(` Job ${job.id} marked complete`); + + return { + jobId: job.id, + batchId: job.discoveryBatchId || undefined, + downloadBatchId: jobMeta?.batchId, + spotifyImportJobId: jobMeta?.spotifyImportJobId, + userId: job.userId, + subject: job.subject, + metadata: jobMeta, + }; + }, + { logPrefix: "[COMPLETE-TX]" } + ); + + // Post-transaction operations (notifications, batch completion) + if (result.jobId && result.userId) { + // Send notification + try { + const decision = + await notificationPolicyService.evaluateNotification( + result.jobId, + "complete" + ); + + if (decision.shouldNotify) { + logger.debug( + ` Sending completion notification: ${decision.reason}` + ); + await notificationService.notifyDownloadComplete( + result.userId, + result.subject, + undefined, + result.metadata?.artistId + ); + + await prisma.downloadJob.update({ + where: { id: result.jobId }, + data: { + metadata: { + ...result.metadata, + notificationSent: true, + }, + }, + }); + } else { + logger.debug( + ` Suppressing completion notification: ${decision.reason}` + ); + } } catch (notifError) { - console.error("Failed to send download notification:", notifError); + logger.error( + "Failed to evaluate/send download notification:", + notifError + ); + } + + // Check batch completion + if (result.batchId) { + const { discoverWeeklyService } = await import( + "./discoverWeekly" + ); + await discoverWeeklyService.checkBatchCompletion( + result.batchId + ); + } + + if (result.spotifyImportJobId) { + 
const { spotifyImportService } = await import( + "./spotifyImport" + ); + await spotifyImportService.checkImportCompletion( + result.spotifyImportJobId + ); } } - const metadata = job.metadata as any; - const downloadBatchId = metadata?.batchId as string | undefined; - const spotifyImportJobId = metadata?.spotifyImportJobId as string | undefined; - - // Check if part of discovery batch - if (job.discoveryBatchId) { - console.log(` Part of Discovery batch: ${job.discoveryBatchId}`); - // Use dynamic import to avoid circular dependency - const { discoverWeeklyService } = await import("./discoverWeekly"); - await discoverWeeklyService.checkBatchCompletion( - job.discoveryBatchId - ); - return { - jobId: job.id, - batchId: job.discoveryBatchId, - downloadBatchId, - }; - } - - // Check if part of Spotify import - if (spotifyImportJobId) { - console.log(` Part of Spotify Import: ${spotifyImportJobId}`); - // Use dynamic import to avoid circular dependency - const { spotifyImportService } = await import("./spotifyImport"); - await spotifyImportService.checkImportCompletion(spotifyImportJobId); - return { - jobId: job.id, - spotifyImportJobId, - downloadBatchId, - }; - } - - // Check if part of download batch (artist download) - if (downloadBatchId) { - console.log(` Part of download batch: ${downloadBatchId}`); - } - - return { jobId: job.id, downloadBatchId }; + return { + jobId: result.jobId, + batchId: result.batchId, + downloadBatchId: result.downloadBatchId, + spotifyImportJobId: result.spotifyImportJobId, + }; } - // Track recently processed failure events to prevent duplicate handling - private processedFailures = new Map(); - private readonly FAILURE_DEDUP_WINDOW_MS = 30000; // 30 seconds - /** * Handle import failure - LET LIDARR HANDLE RELEASE ITERATION * @@ -713,123 +847,102 @@ class SimpleDownloadManager { reason: string, albumMbid?: string ): Promise<{ retried: boolean; failed: boolean; jobId?: string }> { - console.log(`\n[RETRY] Import failed: 
${downloadId}`); - console.log(` Reason: ${reason}`); + logger.debug(`\n[RETRY] Import failed: ${downloadId}`); + logger.debug(` Reason: ${reason}`); - // Deduplicate failure events - same downloadId within 30 seconds - const now = Date.now(); - const lastProcessed = this.processedFailures.get(downloadId); - if ( - lastProcessed && - now - lastProcessed < this.FAILURE_DEDUP_WINDOW_MS - ) { - console.log( - ` Duplicate failure event (within ${ - this.FAILURE_DEDUP_WINDOW_MS / 1000 - }s), skipping` - ); - return { retried: false, failed: false }; - } - this.processedFailures.set(downloadId, now); + const result = await this.withTransaction( + async (tx) => { + // ═══════════════════════════════════════════════════════════════ + // STEP 1: Find job and check for recent failure (DB-based dedup) + // ═══════════════════════════════════════════════════════════════ + const job = await tx.downloadJob.findFirst({ + where: { + OR: [ + { lidarrRef: downloadId }, + { targetMbid: albumMbid || undefined }, + ], + status: "processing", + }, + }); - // Clean up old entries periodically - if (this.processedFailures.size > 100) { - for (const [id, time] of this.processedFailures) { - if (now - time > this.FAILURE_DEDUP_WINDOW_MS * 2) { - this.processedFailures.delete(id); + if (!job) { + logger.debug(` No matching job found`); + return { retried: false, failed: false }; } - } - } - // Find all processing jobs to match against - const processingJobs = await prisma.downloadJob.findMany({ - where: { status: "processing" }, - }); + // Check for recent failure (deduplication) + const metadata = (job.metadata as any) || {}; + const lastFailureAt = metadata.lastFailureAt; + const FAILURE_DEDUP_WINDOW_MS = 30000; // 30 seconds - let job: (typeof processingJobs)[0] | undefined; + if (lastFailureAt) { + const timeSinceLastFailure = + Date.now() - new Date(lastFailureAt).getTime(); + if (timeSinceLastFailure < FAILURE_DEDUP_WINDOW_MS) { + logger.debug( + ` Duplicate failure (${Math.round( + 
timeSinceLastFailure / 1000 + )}s ago), skipping` + ); + return { retried: false, failed: false, jobId: job.id }; + } + } - // Strategy 1: Match by current lidarrRef - job = processingJobs.find((j) => j.lidarrRef === downloadId); - if (job) console.log(` Matched by lidarrRef`); + logger.debug(` Found job: ${job.id}`); - // Strategy 2: Match by previousDownloadIds in metadata - if (!job) { - job = processingJobs.find((j) => { - const metadata = j.metadata as any; - const prevIds = metadata?.previousDownloadIds as - | string[] - | undefined; - return prevIds?.includes(downloadId); - }); - if (job) console.log(` Matched by previousDownloadIds`); - } + // ═══════════════════════════════════════════════════════════════ + // STEP 2: Update failure tracking + // ═══════════════════════════════════════════════════════════════ + const failureCount = (metadata.failureCount || 0) + 1; + const previousDownloadIds = metadata.previousDownloadIds || []; + if (downloadId && !previousDownloadIds.includes(downloadId)) { + previousDownloadIds.push(downloadId); + } - // Strategy 3: Match by MBID - if (!job && albumMbid) { - job = processingJobs.find((j) => j.targetMbid === albumMbid); - if (job) console.log(` Matched by albumMbid`); - } + // Update status text for retry attempts + const lidarrAttempts = (metadata.lidarrAttempts || 1) + 1; + const statusText = `Lidarr #${lidarrAttempts}`; - if (!job) { - console.log( - ` No matching job found - cleaning up Lidarr queue anyway` - ); - // Still try to remove from Lidarr queue to prevent it from being stuck - await this.removeFromLidarrQueue(downloadId); - return { retried: false, failed: false }; - } + await tx.downloadJob.update({ + where: { id: job.id }, + data: { + lidarrRef: null, // Clear for next grab + metadata: { + ...metadata, + failureCount, + lastError: reason, + lastFailureAt: new Date().toISOString(), + previousDownloadIds, + lidarrAttempts, + statusText, + }, + }, + }); - console.log(` Found job: ${job.id}`); - 
console.log(` Album: ${job.subject}`); + logger.debug(` Failure #${failureCount} recorded`); - // ============================================ - // LET LIDARR HANDLE RELEASE ITERATION - // ============================================ - // Blocklist current release and let Lidarr search for alternatives - // skipRedownload=false means Lidarr will automatically search for another release - - const metadata = (job.metadata as any) || {}; - const failureCount = (metadata.failureCount || 0) + 1; - const previousDownloadIds = metadata.previousDownloadIds || []; - if (downloadId && !previousDownloadIds.includes(downloadId)) { - previousDownloadIds.push(downloadId); - } - - // Update job with failure tracking (no retry limit - let Lidarr exhaust options) - await prisma.downloadJob.update({ - where: { id: job.id }, - data: { - lidarrRef: null, // Clear - we'll get a new one from Lidarr's next grab - metadata: { - ...metadata, - failureCount, - lastError: reason, - lastFailureAt: new Date().toISOString(), - previousDownloadIds, - }, + return { retried: true, failed: false, jobId: job.id }; }, - }); + { logPrefix: "[FAIL-TX]" } + ); - console.log(` Failure #${failureCount} - blocklisting and letting Lidarr find alternative`); - - // Blocklist with skipRedownload=false so Lidarr searches for alternatives - await this.removeFromLidarrQueue(downloadId); - - // For Spotify Import jobs, check if this failure completes the import - // (Unlike regular downloads, we don't do fallback, so failure might mean completion) - if (metadata.spotifyImportJobId) { - // Don't check immediately - let Lidarr try alternative releases first - // The stale job cleanup will eventually mark it as exhausted + // Blocklist cleanup happens outside transaction + if (result.retried) { + logger.debug(` Blocklisting and letting Lidarr find alternative`); + await this.removeFromLidarrQueue(downloadId); + } else if (!result.jobId) { + // No job found - still clean up Lidarr queue + await 
this.removeFromLidarrQueue(downloadId); } - return { retried: true, failed: false, jobId: job.id }; + return result; } /** * Try the next album from the same artist when current album is exhausted * This is called when all releases for an album have been tried - * - * IMPORTANT: + * + * IMPORTANT: * - For Discovery Weekly jobs, we DON'T do same-artist fallback. * Discovery should find NEW artists, not more albums from the same artist. * - For Spotify Import jobs, we DON'T do same-artist fallback. @@ -847,62 +960,88 @@ class SimpleDownloadManager { // Discovery should prioritize ARTIST DIVERSITY - let the discovery system // find a completely different artist instead if (job.discoveryBatchId) { - console.log(`[RETRY] Discovery job - skipping same-artist fallback (diversity enforced)`); - console.log(` Discovery should find NEW artists, not more from: ${artistName}`); + logger.debug( + `[RETRY] Discovery job - skipping same-artist fallback (diversity enforced)` + ); + logger.debug( + ` Discovery should find NEW artists, not more from: ${artistName}` + ); return await this.markJobExhausted(job, reason); } // CRITICAL: For Spotify Import, DON'T try same-artist fallback // User wants the EXACT playlist, not substitutes from same artist - if (metadata.spotifyImportJobId || metadata.downloadType === "spotify_import" || metadata.noFallback) { - console.log(`[RETRY] Spotify Import job - skipping fallback (exact match required)`); - console.log(` User wants exact album: ${job.subject}`); - + if ( + metadata.spotifyImportJobId || + metadata.downloadType === "spotify_import" || + metadata.noFallback + ) { + logger.debug( + `[RETRY] Spotify Import job - skipping fallback (exact match required)` + ); + logger.debug(` User wants exact album: ${job.subject}`); + // Mark as failed and trigger completion check const result = await this.markJobExhausted(job, reason); - + // Check if import is complete if (metadata.spotifyImportJobId) { - const { spotifyImportService } = await 
import("./spotifyImport"); - await spotifyImportService.checkImportCompletion(metadata.spotifyImportJobId); + const { spotifyImportService } = await import( + "./spotifyImport" + ); + await spotifyImportService.checkImportCompletion( + metadata.spotifyImportJobId + ); } - + return result; } if (!artistMbid) { - console.log(` No artistMbid - cannot try other albums from same artist`); + logger.debug( + ` No artistMbid - cannot try other albums from same artist` + ); return await this.markJobExhausted(job, reason); } - console.log(`[RETRY] Trying other albums from artist: ${artistName || artistMbid}`); + logger.debug( + `[RETRY] Trying other albums from artist: ${ + artistName || artistMbid + }` + ); try { // Get albums available in LIDARR for this artist (not MusicBrainz) // MusicBrainz has many obscure albums (bootlegs, live recordings) that Lidarr can't find - const lidarrAlbums = await lidarrService.getArtistAlbums(artistMbid); - + const lidarrAlbums = await lidarrService.getArtistAlbums( + artistMbid + ); + if (!lidarrAlbums || lidarrAlbums.length === 0) { - console.log(` No albums found in Lidarr for artist`); + logger.debug(` No albums found in Lidarr for artist`); return await this.markJobExhausted(job, reason); } - console.log(` Found ${lidarrAlbums.length} albums in Lidarr for artist`); + logger.debug( + ` Found ${lidarrAlbums.length} albums in Lidarr for artist` + ); // Get albums we've already tried const triedAlbumMbids = new Set(); - + // Check for other jobs with same artist const artistJobs = await prisma.downloadJob.findMany({ where: { artistMbid: artistMbid, - status: { in: ["processing", "completed", "failed", "exhausted"] }, + status: { + in: ["processing", "completed", "failed", "exhausted"], + }, }, }); artistJobs.forEach((j: any) => { triedAlbumMbids.add(j.targetMbid); }); - + // Also add current job's album triedAlbumMbids.add(job.targetMbid); @@ -911,20 +1050,23 @@ class SimpleDownloadManager { (album: any) => 
!triedAlbumMbids.has(album.foreignAlbumId) ); - console.log(` Untried albums in Lidarr: ${untriedAlbums.length}`); + logger.debug(` Untried albums in Lidarr: ${untriedAlbums.length}`); if (untriedAlbums.length === 0) { - console.log(` All Lidarr albums from artist exhausted`); + logger.debug(` All Lidarr albums from artist exhausted`); return await this.markJobExhausted(job, reason); } // Pick the first untried album (prioritize studio albums over singles/EPs if possible) - const studioAlbums = untriedAlbums.filter((a: any) => - a.albumType?.toLowerCase() === 'album' || - !a.albumType + const studioAlbums = untriedAlbums.filter( + (a: any) => + a.albumType?.toLowerCase() === "album" || !a.albumType + ); + const nextAlbum = + studioAlbums.length > 0 ? studioAlbums[0] : untriedAlbums[0]; + logger.debug( + `[RETRY] Trying next album from same artist: ${nextAlbum.title}` ); - const nextAlbum = studioAlbums.length > 0 ? studioAlbums[0] : untriedAlbums[0]; - console.log(`[RETRY] Trying next album from same artist: ${nextAlbum.title}`); // Mark current job as exhausted (not failed - we're continuing with same artist) await prisma.downloadJob.update({ @@ -943,7 +1085,7 @@ class SimpleDownloadManager { const newJob = await prisma.downloadJob.create({ data: { userId: job.userId, - subject: `${artistName || 'Unknown'} - ${nextAlbum.title}`, + subject: `${artistName || "Unknown"} - ${nextAlbum.title}`, type: "album", targetMbid: albumMbid, status: "pending", @@ -963,7 +1105,7 @@ class SimpleDownloadManager { }, }); - console.log(` Created fallback job: ${newJob.id}`); + logger.debug(` Created fallback job: ${newJob.id}`); // Start the download const result = await this.startDownload( @@ -975,22 +1117,26 @@ class SimpleDownloadManager { ); if (result.success) { - console.log(` Same-artist fallback download started`); + logger.debug(` Same-artist fallback download started`); return { retried: true, failed: false, jobId: newJob.id }; } else { - console.log(` Same-artist fallback 
failed to start: ${result.error}`); + logger.debug( + ` Same-artist fallback failed to start: ${result.error}` + ); // The new job will be marked as failed by startDownload return { retried: false, failed: true, jobId: newJob.id }; } } catch (error: any) { - console.error(` Error trying same-artist fallback: ${error.message}`); + logger.error( + ` Error trying same-artist fallback: ${error.message}` + ); return await this.markJobExhausted(job, reason); } } /** * Mark a job as exhausted (all releases and same-artist albums tried) - * + * * IMPORTANT: Before failing, check if another job for the same album already succeeded. * This handles race conditions where duplicates exist and one succeeds. */ @@ -998,7 +1144,7 @@ class SimpleDownloadManager { job: any, reason: string ): Promise<{ retried: boolean; failed: boolean; jobId?: string }> { - console.log(`[RETRY] Job fully exhausted: ${job.id}`); + logger.debug(`[RETRY] Job fully exhausted: ${job.id}`); const meta = job.metadata as any; const artistName = meta?.artistName?.toLowerCase().trim() || ""; @@ -1016,11 +1162,15 @@ class SimpleDownloadManager { if (completedDuplicate) { const dupMeta = completedDuplicate.metadata as any; - const dupArtist = dupMeta?.artistName?.toLowerCase().trim() || ""; - const dupAlbum = dupMeta?.albumTitle?.toLowerCase().trim() || ""; - + const dupArtist = + dupMeta?.artistName?.toLowerCase().trim() || ""; + const dupAlbum = + dupMeta?.albumTitle?.toLowerCase().trim() || ""; + if (dupArtist === artistName && dupAlbum === albumTitle) { - console.log(` Found completed duplicate job ${completedDuplicate.id} - marking this as completed too`); + logger.debug( + ` Found completed duplicate job ${completedDuplicate.id} - marking this as completed too` + ); await prisma.downloadJob.update({ where: { id: job.id }, data: { @@ -1050,68 +1200,53 @@ class SimpleDownloadManager { // Check batch completion for discovery jobs if (job.discoveryBatchId) { const { discoverWeeklyService } = await 
import("./discoverWeekly"); - await discoverWeeklyService.checkBatchCompletion(job.discoveryBatchId); + await discoverWeeklyService.checkBatchCompletion( + job.discoveryBatchId + ); } - // Send failure notification ONLY if: - // 1. Not discovery/spotify import - // 2. Notification not already sent for this job - // 3. No other job for the same album has already notified - const isDiscovery = meta?.downloadType === "discovery"; - const isSpotifyImport = !!meta?.spotifyImportJobId; - const notificationAlreadySent = meta?.notificationSent === true; - - if (!isDiscovery && !isSpotifyImport && !notificationAlreadySent) { - // Check if any OTHER job for this album already sent a notification - const otherNotified = await prisma.downloadJob.findFirst({ - where: { - id: { not: job.id }, - userId: job.userId, - metadata: { - path: ["artistName"], - string_contains: meta?.artistName || "", - }, - }, - }); + // Send failure notification using policy service + try { + const decision = + await notificationPolicyService.evaluateNotification( + job.id, + "failed" + ); - let skipNotification = false; - if (otherNotified) { - const otherMeta = otherNotified.metadata as any; - if (otherMeta?.notificationSent && - otherMeta?.albumTitle?.toLowerCase() === albumTitle) { - skipNotification = true; - console.log(` Skipping notification - another job already notified for this album`); - } - } + if (decision.shouldNotify) { + logger.debug( + ` Sending failure notification: ${decision.reason}` + ); + await notificationService.notifyDownloadFailed( + job.userId, + job.subject, + reason + ); - if (!skipNotification) { - try { - await notificationService.notifyDownloadFailed( - job.userId, - job.subject, - reason - ); - - // Mark notification as sent - await prisma.downloadJob.update({ - where: { id: job.id }, - data: { - metadata: { - ...meta, - notificationSent: true, - }, + // Mark notification as sent + await prisma.downloadJob.update({ + where: { id: job.id }, + data: { + metadata: { + 
...meta, + notificationSent: true, }, - }); - } catch (notifError) { - console.error("Failed to send failure notification:", notifError); - } + }, + }); + } else { + logger.debug( + ` Suppressing failure notification: ${decision.reason}` + ); } + } catch (notifError) { + logger.error( + "Failed to evaluate/send failure notification:", + notifError + ); } return { retried: false, failed: true, jobId: job.id }; } - // Timeout for "no sources" - if Lidarr hasn't grabbed anything after searching - private readonly NO_SOURCE_TIMEOUT_MS = 10 * 60 * 1000; // 10 minutes (indexer searches can be slow) /** * Mark stale jobs as failed (called by cleanup job) @@ -1131,26 +1266,46 @@ class SimpleDownloadManager { // Log to session for debugging Spotify imports if (activeJobs.length > 0) { - const spotifyJobs = activeJobs.filter(j => j.id.startsWith("spotify_")); + const spotifyJobs = activeJobs.filter((j) => + j.id.startsWith("spotify_") + ); if (spotifyJobs.length > 0) { - sessionLog('CLEANUP', `Checking ${activeJobs.length} active jobs (${spotifyJobs.length} Spotify import)`); + sessionLog( + "CLEANUP", + `Checking ${activeJobs.length} active jobs (${spotifyJobs.length} Spotify import)` + ); } } // Separate pending from processing - const pendingJobs = activeJobs.filter(j => j.status === "pending"); - const processingJobs = activeJobs.filter(j => j.status === "processing"); + const pendingJobs = activeJobs.filter((j) => j.status === "pending"); + const processingJobs = activeJobs.filter( + (j) => j.status === "processing" + ); // Handle old pending jobs first (they never started) - const stalePendingJobs = pendingJobs.filter(job => job.createdAt < pendingCutoff); + const stalePendingJobs = pendingJobs.filter( + (job) => job.createdAt < pendingCutoff + ); if (stalePendingJobs.length > 0) { - console.log(`\n⏰ Found ${stalePendingJobs.length} stuck PENDING jobs (never started)`); - sessionLog('CLEANUP', `Found ${stalePendingJobs.length} stuck PENDING jobs`); + logger.debug( + 
`\n⏰ Found ${stalePendingJobs.length} stuck PENDING jobs (never started)` + ); + sessionLog( + "CLEANUP", + `Found ${stalePendingJobs.length} stuck PENDING jobs` + ); for (const job of stalePendingJobs) { - console.log(` Timing out: ${job.subject} (never started - ${Math.round((Date.now() - job.createdAt.getTime()) / 60000)}m old)`); - + logger.debug( + ` Timing out: ${ + job.subject + } (never started - ${Math.round( + (Date.now() - job.createdAt.getTime()) / 60000 + )}m old)` + ); + // Mark as failed await prisma.downloadJob.update({ where: { id: job.id }, @@ -1163,8 +1318,12 @@ class SimpleDownloadManager { // Check batch completion for discovery jobs if (job.discoveryBatchId) { - const { discoverWeeklyService } = await import("./discoverWeekly"); - await discoverWeeklyService.checkBatchCompletion(job.discoveryBatchId); + const { discoverWeeklyService } = await import( + "./discoverWeekly" + ); + await discoverWeeklyService.checkBatchCompletion( + job.discoveryBatchId + ); } } } @@ -1186,9 +1345,17 @@ class SimpleDownloadManager { // Skip Soulseek jobs - they complete immediately with direct slsk-client // Old SLSKD jobs used source: "slskd", new direct jobs use source: "soulseek_direct" - if (metadata?.source === "slskd" || metadata?.source === "soulseek_direct") { - console.log(` ${job.subject}: Soulseek download, skipping stale check`); - sessionLog('CLEANUP', `Skipping Soulseek job: ${job.subject} (status: ${job.status})`); + if ( + metadata?.source === "slskd" || + metadata?.source === "soulseek_direct" + ) { + logger.debug( + ` ${job.subject}: Soulseek download, skipping stale check` + ); + sessionLog( + "CLEANUP", + `Skipping Soulseek job: ${job.subject} (status: ${job.status})` + ); continue; } @@ -1201,12 +1368,18 @@ class SimpleDownloadManager { // Jobs with lidarrRef = grabbed but potentially still downloading if (startedAt < importCutoff) { // Check if Lidarr is still actively downloading before timing out - const downloadStatus = await 
isDownloadActive(job.lidarrRef); - + const downloadStatus = await isDownloadActive( + job.lidarrRef + ); + if (downloadStatus.active) { // Still downloading - extend the timeout, don't mark as stale - console.log(` ${job.subject}: Still downloading (${downloadStatus.progress || 0}%), extending timeout`); - + logger.debug( + ` ${job.subject}: Still downloading (${ + downloadStatus.progress || 0 + }%), extending timeout` + ); + // Update the startedAt to extend the timeout await prisma.downloadJob.update({ where: { id: job.id }, @@ -1215,8 +1388,8 @@ class SimpleDownloadManager { ...metadata, startedAt: new Date().toISOString(), extendedTimeout: true, - } - } + }, + }, }); } else { // Not actively downloading - mark as stale @@ -1230,14 +1403,55 @@ class SimpleDownloadManager { return 0; } - console.log(`\n⏰ Found ${staleJobs.length} stale download jobs`); - sessionLog('CLEANUP', `Found ${staleJobs.length} stale jobs to mark as failed`); + logger.debug(`\n⏰ Found ${staleJobs.length} stale download jobs`); + sessionLog( + "CLEANUP", + `Found ${staleJobs.length} stale jobs to mark as failed` + ); // Track unique batch IDs to check const batchIds = new Set(); const downloadBatchIds = new Set(); for (const job of staleJobs) { + const metadata = (job.metadata as any) || {}; + + // Before marking as failed, check if still in retry window using policy service + try { + const policyDecision = + await notificationPolicyService.evaluateNotification( + job.id, + "timeout" + ); + + // If policy says to extend timeout (still in retry window), do so + if ( + policyDecision.reason.includes("retry window") || + policyDecision.reason.includes("extending timeout") + ) { + logger.debug( + ` ${job.subject}: ${policyDecision.reason} - extending timeout` + ); + await prisma.downloadJob.update({ + where: { id: job.id }, + data: { + metadata: { + ...metadata, + startedAt: new Date().toISOString(), + timeoutExtendedByPolicy: true, + }, + }, + }); + continue; // Skip to next job + } + } catch 
(policyError) { + logger.error( + ` Failed to evaluate policy for ${job.id}:`, + policyError + ); + // Continue with failure handling if policy check fails + } + const hasLidarrRef = !!job.lidarrRef; const errorMessage = hasLidarrRef ? `Import failed - download stuck for ${ @@ -1245,14 +1459,15 @@ class SimpleDownloadManager { } minutes` : `No sources found - no indexer results`; - console.log( + logger.debug( ` Timing out: ${job.subject} (${ hasLidarrRef ? "stuck import" : "no sources" })` ); - sessionLog('CLEANUP', `Marking stale: ${job.subject} - ${errorMessage}`); - - const metadata = (job.metadata as any) || {}; + sessionLog( + "CLEANUP", + `Marking stale: ${job.subject} - ${errorMessage}` + ); const artistName = metadata?.artistName?.toLowerCase().trim() || ""; const albumTitle = metadata?.albumTitle?.toLowerCase().trim() || ""; @@ -1268,11 +1483,15 @@ class SimpleDownloadManager { if (completedDuplicate) { const dupMeta = completedDuplicate.metadata as any; - const dupArtist = dupMeta?.artistName?.toLowerCase().trim() || ""; - const dupAlbum = dupMeta?.albumTitle?.toLowerCase().trim() || ""; - + const dupArtist = + dupMeta?.artistName?.toLowerCase().trim() || ""; + const dupAlbum = + dupMeta?.albumTitle?.toLowerCase().trim() || ""; + if (dupArtist === artistName && dupAlbum === albumTitle) { - console.log(` Found completed duplicate - marking this job as completed too`); + logger.debug( + ` Found completed duplicate - marking this job as completed too` + ); await prisma.downloadJob.update({ where: { id: job.id }, data: { @@ -1302,21 +1521,27 @@ class SimpleDownloadManager { const artistMbid = job.artistMbid || metadata.artistMbid; if (artistMbid && !job.discoveryBatchId) { - console.log(` Attempting same-artist fallback...`); + logger.debug(` Attempting same-artist fallback...`); try { const fallbackResult = await this.tryNextAlbumFromArtist( { ...job, metadata }, errorMessage ); if (fallbackResult.retried && fallbackResult.jobId) { - console.log(` 
Same-artist fallback started: ${fallbackResult.jobId}`); + logger.debug( + ` Same-artist fallback started: ${fallbackResult.jobId}` + ); replacementStarted = true; } } catch (fallbackErr: any) { - console.error(` Same-artist fallback error: ${fallbackErr.message}`); + logger.error( + ` Same-artist fallback error: ${fallbackErr.message}` + ); } } else if (job.discoveryBatchId) { - console.log(` Discovery job - letting discovery system find new artist`); + logger.debug( + ` Discovery job - letting discovery system find new artist` + ); } // If no replacement was started, mark the original job as failed @@ -1347,7 +1572,7 @@ class SimpleDownloadManager { if (batchIds.size > 0) { const { discoverWeeklyService } = await import("./discoverWeekly"); for (const batchId of batchIds) { - console.log( + logger.debug( ` Checking discovery batch completion: ${batchId}` ); await discoverWeeklyService.checkBatchCompletion(batchId); @@ -1361,7 +1586,10 @@ class SimpleDownloadManager { * Blocklist a failed release and let Lidarr search for alternatives * skipRedownload=false tells Lidarr to automatically search for another release */ - private async blocklistAndRetry(downloadId: string, _lidarrAlbumId: number) { + private async blocklistAndRetry( + downloadId: string, + _lidarrAlbumId: number + ) { try { const settings = await getSystemSettings(); if (!settings?.lidarrUrl || !settings?.lidarrApiKey) return; @@ -1390,14 +1618,16 @@ class SimpleDownloadManager { timeout: 10000, } ); - console.log(` Blocklisted release, Lidarr searching for alternative`); + logger.debug( + ` Blocklisted release, Lidarr searching for alternative` + ); } } catch (queueError: any) { // Queue item may have already been removed - console.log(` Queue cleanup: ${queueError.message}`); + logger.debug(` Queue cleanup: ${queueError.message}`); } } catch (error: any) { - console.error(` Blocklist/retry failed:`, error.message); + logger.error(` Blocklist/retry failed:`, error.message); } } @@ -1432,14 +1662,16 
@@ class SimpleDownloadManager { timeout: 10000, } ); - console.log(` Removed from Lidarr queue, blocklisted, triggering new search`); + logger.debug( + ` Removed from Lidarr queue, blocklisted, triggering new search` + ); } else { - console.log( + logger.debug( ` Item not found in Lidarr queue (may already be removed)` ); } } catch (error: any) { - console.error( + logger.error( ` Failed to remove from Lidarr queue:`, error.message ); @@ -1461,7 +1693,7 @@ class SimpleDownloadManager { return { removed: 0, errors: ["Lidarr not configured"] }; } - console.log(`\nClearing Lidarr download queue...`); + logger.debug(`\nClearing Lidarr download queue...`); const queueResponse = await axios.get( `${settings.lidarrUrl}/api/v1/queue`, @@ -1477,7 +1709,7 @@ class SimpleDownloadManager { return { removed: 0, errors: [] }; } - console.log(` Found ${records.length} items in queue`); + logger.debug(` Found ${records.length} items in queue`); // Filter for failed/warning status items const failedItems = records.filter( @@ -1495,7 +1727,7 @@ class SimpleDownloadManager { return { removed: 0, errors: [] }; } - console.log(` ${failedItems.length} items have errors/warnings`); + logger.debug(` ${failedItems.length} items have errors/warnings`); for (const item of failedItems) { try { @@ -1512,7 +1744,7 @@ class SimpleDownloadManager { timeout: 10000, } ); - console.log( + logger.debug( ` Removed: ${ item.title || item.album?.title || "Unknown" }` @@ -1520,7 +1752,7 @@ class SimpleDownloadManager { removed++; } catch (error: any) { const msg = `Failed to remove ${item.id}: ${error.message}`; - console.log(` ✗ ${msg}`); + logger.debug(` ${msg}`); errors.push(msg); } } @@ -1528,7 +1760,7 @@ class SimpleDownloadManager { // Explicitly trigger album searches for removed items if (albumIdsToSearch.length > 0) { try { - console.log( + logger.debug( ` Triggering search for ${albumIdsToSearch.length} album(s)...` ); await axios.post( @@ -1542,20 +1774,20 @@ class SimpleDownloadManager { 
timeout: 10000, } ); - console.log( + logger.debug( ` Search triggered for alternative releases` ); } catch (searchError: any) { - console.log( + logger.debug( ` Failed to trigger search: ${searchError.message}` ); } } - console.log(` Removed ${removed} items from queue`); + logger.debug(` Removed ${removed} items from queue`); return { removed, errors }; } catch (error: any) { - console.error(` Queue cleanup failed:`, error.message); + logger.error(` Queue cleanup failed:`, error.message); return { removed, errors: [error.message] }; } } @@ -1583,29 +1815,33 @@ class SimpleDownloadManager { * Reconcile processing jobs with Lidarr * Checks if albums in "processing" state are already available in Lidarr * and marks them as completed if so (fixes missed webhook completion events) - * + * * IMPORTANT: Checks by both MBID and artist+album name to handle MBID mismatches */ - async reconcileWithLidarr(): Promise<{ reconciled: number; errors: string[] }> { - console.log(`\n[RECONCILE] Checking processing jobs against Lidarr...`); - + async reconcileWithLidarr(): Promise<{ + reconciled: number; + errors: string[]; + }> { + logger.debug(`\n[RECONCILE] Checking processing jobs against Lidarr...`); + const processingJobs = await prisma.downloadJob.findMany({ where: { status: "processing" }, }); if (processingJobs.length === 0) { - console.log(` No processing jobs to reconcile`); + logger.debug(` No processing jobs to reconcile`); return { reconciled: 0, errors: [] }; } - console.log(` Found ${processingJobs.length} processing job(s)`); + logger.debug(` Found ${processingJobs.length} processing job(s)`); let reconciled = 0; const errors: string[] = []; for (const job of processingJobs) { const metadata = job.metadata as any; - const albumMbid = job.targetMbid || metadata?.albumMbid || metadata?.lidarrMbid; + const albumMbid = + job.targetMbid || metadata?.albumMbid || metadata?.lidarrMbid; const artistName = metadata?.artistName; const albumTitle = metadata?.albumTitle; @@ 
-1614,17 +1850,28 @@ class SimpleDownloadManager { // Strategy 1: Check by MBID(s) if (albumMbid) { - isAvailable = await lidarrService.isAlbumAvailable(albumMbid); - + isAvailable = await lidarrService.isAlbumAvailable( + albumMbid + ); + // Also try lidarrMbid if different - if (!isAvailable && metadata?.lidarrMbid && metadata.lidarrMbid !== albumMbid) { - isAvailable = await lidarrService.isAlbumAvailable(metadata.lidarrMbid); + if ( + !isAvailable && + metadata?.lidarrMbid && + metadata.lidarrMbid !== albumMbid + ) { + isAvailable = await lidarrService.isAlbumAvailable( + metadata.lidarrMbid + ); } } // Strategy 2: Check by artist+album name (handles MBID mismatches) if (!isAvailable && artistName && albumTitle) { - isAvailable = await lidarrService.isAlbumAvailableByTitle(artistName, albumTitle); + isAvailable = await lidarrService.isAlbumAvailableByTitle( + artistName, + albumTitle + ); } // Strategy 3: Parse subject if no metadata (format: "Artist - Album") @@ -1633,13 +1880,19 @@ class SimpleDownloadManager { if (parts.length >= 2) { const parsedArtist = parts[0].trim(); const parsedAlbum = parts.slice(1).join(" - ").trim(); - isAvailable = await lidarrService.isAlbumAvailableByTitle(parsedArtist, parsedAlbum); + isAvailable = + await lidarrService.isAlbumAvailableByTitle( + parsedArtist, + parsedAlbum + ); } } if (isAvailable) { - console.log(` Job ${job.id}: Album "${job.subject}" found in Lidarr - marking complete`); - + logger.debug( + ` Job ${job.id}: Album "${job.subject}" found in Lidarr - marking complete` + ); + await prisma.downloadJob.update({ where: { id: job.id }, data: { @@ -1656,8 +1909,12 @@ class SimpleDownloadManager { // Check batch completion for discovery jobs if (job.discoveryBatchId) { - const { discoverWeeklyService } = await import("./discoverWeekly"); - await discoverWeeklyService.checkBatchCompletion(job.discoveryBatchId); + const { discoverWeeklyService } = await import( + "./discoverWeekly" + ); + await 
discoverWeeklyService.checkBatchCompletion( + job.discoveryBatchId + ); } reconciled++; @@ -1665,19 +1922,270 @@ class SimpleDownloadManager { // Only log for jobs older than 5 minutes const jobAge = Date.now() - (job.createdAt?.getTime() || 0); if (jobAge > 5 * 60 * 1000) { - console.log(` Job ${job.id}: "${job.subject}" not yet available in Lidarr (${Math.round(jobAge / 60000)}m old)`); + logger.debug( + ` Job ${job.id}: "${ + job.subject + }" not yet available in Lidarr (${Math.round( + jobAge / 60000 + )}m old)` + ); } } } catch (error: any) { const msg = `Job ${job.id}: Error checking Lidarr - ${error.message}`; - console.error(` ${msg}`); + logger.error(` ${msg}`); errors.push(msg); } } - console.log(`[RECONCILE] Reconciled ${reconciled} job(s)`); + logger.debug(`[RECONCILE] Reconciled ${reconciled} job(s)`); return { reconciled, errors }; } + + /** + * Sync with Lidarr's queue to detect cancelled/orphaned downloads + * This catches jobs that were cancelled in Lidarr's UI but webhooks didn't notify us + * + * IMPORTANT: Implements grace period to prevent false cancellations when Lidarr + * auto-retries with a different release (new downloadId). Missing downloads are + * only marked as cancelled after 3 sync checks (90 seconds), and replacement + * detection handles downloadId changes. 
+ */ + async syncWithLidarrQueue(): Promise<{ + cancelled: number; + errors: string[]; + }> { + logger.debug( + `\n[QUEUE-SYNC] Syncing processing jobs with Lidarr queue...` + ); + + // Grace period tracking happens in metadata.queueSyncMissingCount + + const processingJobs = await prisma.downloadJob.findMany({ + where: { + status: "processing", + lidarrRef: { not: null }, // Only check jobs that have been grabbed + }, + }); + + if (processingJobs.length === 0) { + logger.debug(` No processing jobs with lidarrRef to sync`); + return { cancelled: 0, errors: [] }; + } + + logger.debug( + ` Found ${processingJobs.length} processing job(s) with lidarrRef` + ); + + try { + // Get current Lidarr queue + const { getQueue } = await import("./lidarr"); + const queueItems = await getQueue(); + + if (queueItems.length === 0) { + logger.debug(` Lidarr queue is empty`); + } else { + logger.debug(` Lidarr queue has ${queueItems.length} item(s)`); + } + + // Build set of downloadIds currently in Lidarr queue + const activeDownloadIds = new Set( + queueItems.map((item) => item.downloadId) + ); + + let cancelled = 0; + const errors: string[] = []; + + // Check each processing job + for (const job of processingJobs) { + if (!job.lidarrRef) continue; + + const metadata = job.metadata as any; + const artistName = metadata?.artistName; + const albumTitle = metadata?.albumTitle; + + // If download is found in queue, reset its missing counter and continue + if (activeDownloadIds.has(job.lidarrRef)) { + // Reset missing counter if it was previously set + if (metadata?.queueSyncMissingCount && metadata.queueSyncMissingCount > 0) { + await prisma.downloadJob.update({ + where: { id: job.id }, + data: { + metadata: { + ...metadata, + queueSyncMissingCount: 0, + lastQueueSyncFound: new Date().toISOString(), + }, + }, + }); + } + continue; + } + + // Download ID not found in queue - start grace period tracking + logger.debug( + ` Job ${job.id}: Download ID "${job.lidarrRef}" not in queue` + ); 
+ logger.debug(` Album: ${artistName} - ${albumTitle}`); + + // Track missing download attempts for grace period + const missingKey = `missing_${job.id}`; + const missingCount = (metadata?.queueSyncMissingCount || 0) + 1; + + if (missingCount < 3) { + logger.debug( + ` Download missing from queue (attempt ${missingCount}/3) - grace period active` + ); + + // Update missing count in metadata + await prisma.downloadJob.update({ + where: { id: job.id }, + data: { + metadata: { + ...metadata, + queueSyncMissingCount: missingCount, + lastQueueSyncCheck: new Date().toISOString(), + }, + }, + }); + continue; // Don't mark as cancelled yet + } + + // After 3 checks (90 seconds), check for replacement downloads before cancelling + logger.debug( + ` Download missing after 3 checks - checking for replacement` + ); + + // Check if replacement download exists (same album, different downloadId) + // QueueItem.title typically contains "Artist - Album", so check if it includes the album name + const replacementDownload = queueItems.find((item) => { + if (!item.downloadId || !albumTitle) return false; + + const queueTitle = item.title?.toLowerCase() || ""; + const searchAlbum = albumTitle.toLowerCase(); + const searchArtist = artistName?.toLowerCase() || ""; + + // Match if queue title contains both artist and album + return ( + queueTitle.includes(searchAlbum) && + (searchArtist ? 
queueTitle.includes(searchArtist) : true) + ); + }); + + if (replacementDownload && replacementDownload.downloadId) { + logger.debug( + ` Replacement download found: ${replacementDownload.downloadId}` + ); + logger.debug( + ` Updating job with new downloadId (Lidarr auto-retry detected)` + ); + + await prisma.downloadJob.update({ + where: { id: job.id }, + data: { + lidarrRef: replacementDownload.downloadId, + error: null, + metadata: { + ...metadata, + previousDownloadId: job.lidarrRef, + replacementDetected: true, + replacementDetectedAt: new Date().toISOString(), + queueSyncMissingCount: 0, // Reset counter + }, + }, + }); + + continue; // Job is still active with new downloadId + } + + // No replacement found - check if the album is already downloaded + try { + let isAvailable = false; + + if (job.targetMbid) { + isAvailable = await lidarrService.isAlbumAvailable( + job.targetMbid + ); + } + + if (!isAvailable && artistName && albumTitle) { + isAvailable = + await lidarrService.isAlbumAvailableByTitle( + artistName, + albumTitle + ); + } + + if (isAvailable) { + // Album is downloaded - mark as completed + logger.debug( + ` Album found in library - marking complete` + ); + await prisma.downloadJob.update({ + where: { id: job.id }, + data: { + status: "completed", + completedAt: new Date(), + error: null, + metadata: { + ...metadata, + completedAt: new Date().toISOString(), + queueSyncCompleted: true, + queueSyncMissingCount: 0, + }, + }, + }); + cancelled++; + } else { + // Album not downloaded, not in queue, no replacement - mark as failed + logger.warn( + ` Download not found after 90s (3 checks) - marking as failed` + ); + await prisma.downloadJob.update({ + where: { id: job.id }, + data: { + status: "failed", + error: "Lidarr queue sync: Download not found after 90s (3 checks). 
Possible reasons: indexer timeout, quality profile mismatch, or Lidarr auto-cancelled.", + completedAt: new Date(), + lidarrRef: null, + metadata: { + ...metadata, + cancelledAt: new Date().toISOString(), + queueSyncCancelled: true, + queueSyncMissingCount: missingCount, + }, + }, + }); + + // Check batch completion for discovery jobs + if (job.discoveryBatchId) { + const { discoverWeeklyService } = await import( + "./discoverWeekly" + ); + await discoverWeeklyService.checkBatchCompletion( + job.discoveryBatchId + ); + } + + cancelled++; + } + } catch (error: any) { + const msg = `Job ${job.id}: Error checking album availability - ${error.message}`; + logger.error(` ${msg}`); + errors.push(msg); + } + } + + logger.debug(`[QUEUE-SYNC] Processed ${cancelled} orphaned job(s)`); + return { cancelled, errors }; + } catch (error: any) { + logger.error( + `[QUEUE-SYNC] Failed to sync with Lidarr queue:`, + error.message + ); + return { cancelled: 0, errors: [error.message] }; + } + } } // Singleton instance diff --git a/backend/src/services/soulseek.ts b/backend/src/services/soulseek.ts index caeec14..17eb0ec 100644 --- a/backend/src/services/soulseek.ts +++ b/backend/src/services/soulseek.ts @@ -52,9 +52,21 @@ class SoulseekService { private connecting = false; private connectPromise: Promise | null = null; private lastConnectAttempt = 0; + private lastFailedAttempt = 0; private readonly RECONNECT_COOLDOWN = 30000; // 30 seconds between reconnect attempts - private readonly DOWNLOAD_TIMEOUT = 180000; // 3 minutes per download attempt - private readonly MAX_DOWNLOAD_RETRIES = 3; // Try up to 3 different users + private readonly FAILED_RECONNECT_COOLDOWN = 5000; // 5 seconds after failed attempt + private readonly DOWNLOAD_TIMEOUT_INITIAL = 60000; // 1 minute for first attempt + private readonly DOWNLOAD_TIMEOUT_RETRY = 30000; // 30 seconds for retries + private readonly MAX_DOWNLOAD_RETRIES = 5; // Try up to 5 different users (more retries with shorter timeouts) + + // 
Circuit breaker for failing users + private failedUsers = new Map(); + private readonly FAILURE_THRESHOLD = 3; // Block after 3 failures + private readonly FAILURE_WINDOW = 300000; // 5 minute window + + // Concurrency tracking + private activeDownloads = 0; + private maxConcurrentDownloads = 0; // Connection health tracking private connectedAt: Date | null = null; @@ -72,12 +84,12 @@ class SoulseekService { private normalizeTrackTitle(title: string): string { // First, normalize Unicode characters to ASCII equivalents for better search matching let normalized = title - .replace(/…/g, "") // Remove ellipsis (U+2026) - files don't have this - .replace(/[''′`]/g, "'") // Smart apostrophes → ASCII apostrophe - .replace(/[""]/g, '"') // Smart quotes → ASCII quotes - .replace(/\//g, " ") // Slash → space (file names can't have /) - .replace(/[–—]/g, "-") // En/em dash → hyphen - .replace(/[×]/g, "x"); // Multiplication sign → x + .replace(/…/g, "") // Remove ellipsis (U+2026) - files don't have this + .replace(/[''′`]/g, "'") // Smart apostrophes → ASCII apostrophe + .replace(/[""]/g, '"') // Smart quotes → ASCII quotes + .replace(/\//g, " ") // Slash → space (file names can't have /) + .replace(/[–—]/g, "-") // En/em dash → hyphen + .replace(/[×]/g, "x"); // Multiplication sign → x // Remove content in parentheses that contains live/remaster/remix info const livePatterns = @@ -178,21 +190,42 @@ class SoulseekService { return this.connectPromise; } - // Cooldown between reconnect attempts (skip if forced) + // Short cooldown after FAILED attempts (5s), longer after SUCCESS (30s) const now = Date.now(); - if (!force && now - this.lastConnectAttempt < this.RECONNECT_COOLDOWN) { + + // If last successful connection was recent, respect cooldown + if (!force && this.lastConnectAttempt > 0 && + now - this.lastConnectAttempt < this.RECONNECT_COOLDOWN) { throw new Error( "Connection cooldown - please wait before retrying" ); } + + // If last FAILED attempt was very recent (5s), 
wait briefly + if (!force && this.lastFailedAttempt > 0 && + now - this.lastFailedAttempt < this.FAILED_RECONNECT_COOLDOWN) { + throw new Error( + "Connection recently failed - please wait before retrying" + ); + } this.connecting = true; - this.lastConnectAttempt = now; - this.connectPromise = this.connect().finally(() => { - this.connecting = false; - this.connectPromise = null; - }); + this.connectPromise = this.connect() + .then(() => { + // Only set lastConnectAttempt on SUCCESS + this.lastConnectAttempt = Date.now(); + this.lastFailedAttempt = 0; // Clear failed tracking + }) + .catch((err) => { + // Track failed attempt separately (shorter cooldown) + this.lastFailedAttempt = Date.now(); + throw err; + }) + .finally(() => { + this.connecting = false; + this.connectPromise = null; + }); return this.connectPromise; } @@ -450,9 +483,75 @@ class SoulseekService { }); } + /** + * Check if a user should be blocked due to recent failures + */ + private isUserBlocked(username: string): boolean { + const record = this.failedUsers.get(username); + if (!record) return false; + + // Clear old failures outside the window + if (Date.now() - record.lastFailure.getTime() > this.FAILURE_WINDOW) { + this.failedUsers.delete(username); + return false; + } + + return record.failures >= this.FAILURE_THRESHOLD; + } + + /** + * Record a user failure for circuit breaker + */ + private recordUserFailure(username: string): void { + const record = this.failedUsers.get(username) || { + failures: 0, + lastFailure: new Date(), + }; + record.failures++; + record.lastFailure = new Date(); + this.failedUsers.set(username, record); + + if (record.failures >= this.FAILURE_THRESHOLD) { + sessionLog( + "SOULSEEK", + `User ${username} blocked: ${record.failures} failures in ${Math.round( + this.FAILURE_WINDOW / 60000 + )}min window`, + "WARN" + ); + } + } + + /** + * Categorize download errors for smarter retry behavior + */ + private categorizeError(error: Error): { + type: "user_offline" | 
"timeout" | "connection" | "file_not_found" | "unknown"; + skipUser: boolean; + } { + const message = error.message.toLowerCase(); + + if (message.includes("user not exist") || message.includes("user offline")) { + return { type: "user_offline", skipUser: true }; + } + if (message.includes("timed out") || message.includes("timeout")) { + return { type: "timeout", skipUser: true }; + } + if ( + message.includes("connection refused") || + message.includes("connection reset") + ) { + return { type: "connection", skipUser: true }; + } + if (message.includes("file not found") || message.includes("no such file")) { + return { type: "file_not_found", skipUser: true }; + } + return { type: "unknown", skipUser: false }; + } + /** * Rank all search results and return sorted matches (best first) - * Filters out matches below minimum score threshold + * Filters out matches below minimum score threshold and blocked users */ private rankAllResults( results: SearchResult[], @@ -462,9 +561,11 @@ class SoulseekService { // Normalize search terms for matching const normalizedArtist = artistName .toLowerCase() + .replace(/\s*&\s*/g, " and ") .replace(/[^a-z0-9\s]/g, ""); const normalizedTitle = trackTitle .toLowerCase() + .replace(/\s*&\s*/g, " and ") .replace(/[^a-z0-9\s]/g, "") .replace(/^\d+\s*[-.]?\s*/, ""); // Remove leading track numbers @@ -476,15 +577,24 @@ class SoulseekService { .filter((w) => w.length > 2) .slice(0, 3); - const scored = results.map((file) => { + // Filter out blocked users first + const availableResults = results.filter( + (file) => !this.isUserBlocked(file.user) + ); + + const scored = availableResults.map((file) => { const filename = (file.file || "").toLowerCase(); const normalizedFilename = filename.replace(/[^a-z0-9]/g, ""); const shortFilename = filename.split(/[/\\]/).pop() || filename; let score = 0; - // Prefer files with slots available (+20) - if (file.slots) score += 20; + // Strongly prefer files with slots available (+40) + if (file.slots) 
score += 40; + + // Prefer high-speed peers + if (file.speed > 1000000) score += 15; // >1MB/s + else if (file.speed > 500000) score += 5; // >500KB/s // Check if filename contains artist (full or first word) if ( @@ -561,8 +671,25 @@ class SoulseekService { */ async downloadTrack( match: TrackMatch, - destPath: string + destPath: string, + attemptNumber: number = 0 ): Promise<{ success: boolean; error?: string }> { + // Track active downloads for concurrency monitoring + this.activeDownloads++; + this.maxConcurrentDownloads = Math.max( + this.maxConcurrentDownloads, + this.activeDownloads + ); + sessionLog( + "SOULSEEK", + `Active downloads: ${this.activeDownloads}/${this.maxConcurrentDownloads} max` + ); + + // Use shorter timeout for retries + const timeout = + attemptNumber === 0 + ? this.DOWNLOAD_TIMEOUT_INITIAL + : this.DOWNLOAD_TIMEOUT_RETRY; try { await this.ensureConnected(); } catch (err: any) { @@ -587,17 +714,20 @@ class SoulseekService { return new Promise((resolve) => { let resolved = false; - // Timeout handler - 3 minutes max per download attempt + // Timeout handler - progressive timeout based on attempt number const timeoutId = setTimeout(() => { if (!resolved) { resolved = true; + this.activeDownloads--; sessionLog( "SOULSEEK", - `Download timed out after ${ - this.DOWNLOAD_TIMEOUT / 1000 - }s: ${match.filename}`, + `Download timed out after ${timeout / 1000}s: ${ + match.filename + }`, "WARN" ); + // Record user failure for circuit breaker + this.recordUserFailure(match.username); // Clean up partial file if it exists if (fs.existsSync(destPath)) { try { @@ -608,7 +738,7 @@ class SoulseekService { } resolve({ success: false, error: "Download timed out" }); } - }, this.DOWNLOAD_TIMEOUT); + }, timeout); // Create a SearchResult object for the download const downloadFile: SearchResult = { @@ -629,13 +759,21 @@ class SoulseekService { if (resolved) return; // Already timed out resolved = true; clearTimeout(timeoutId); + this.activeDownloads--; if 
(err) { + const errorInfo = this.categorizeError(err); sessionLog( "SOULSEEK", - `Download failed: ${err.message}`, + `Download failed (${errorInfo.type}): ${err.message}`, "ERROR" ); + + // Record user failure if error indicates user issue + if (errorInfo.skipUser) { + this.recordUserFailure(match.username); + } + return resolve({ success: false, error: err.message }); } @@ -952,7 +1090,7 @@ class SoulseekService { sanitize(match.filename) ); - const result = await this.downloadTrack(match, destPath); + const result = await this.downloadTrack(match, destPath, attempt); if (result.success) { if (attempt > 0) { sessionLog( diff --git a/backend/src/services/spotify.ts b/backend/src/services/spotify.ts index db47146..442132c 100644 --- a/backend/src/services/spotify.ts +++ b/backend/src/services/spotify.ts @@ -1,4 +1,5 @@ import axios from "axios"; +import { logger } from "../utils/logger"; /** * Spotify Service @@ -84,7 +85,7 @@ class SpotifyService { for (const endpoint of endpoints) { try { - console.log(`Spotify: Fetching anonymous token from ${endpoint.url}...`); + logger.debug(`Spotify: Fetching anonymous token from ${endpoint.url}...`); const response = await axios.get(endpoint.url, { params: endpoint.params, @@ -104,15 +105,15 @@ class SpotifyService { // Anonymous tokens last about an hour this.tokenExpiry = Date.now() + 3600 * 1000; - console.log("Spotify: Got anonymous token"); + logger.debug("Spotify: Got anonymous token"); return token; } } catch (error: any) { - console.log(`Spotify: Token endpoint failed (${error.response?.status || error.message})`); + logger.debug(`Spotify: Token endpoint failed (${error.response?.status || error.message})`); } } - console.error("Spotify: All token endpoints failed - API browsing unavailable"); + logger.error("Spotify: All token endpoints failed - API browsing unavailable"); return null; } @@ -148,7 +149,7 @@ class SpotifyService { } try { - console.log(`Spotify: Fetching playlist ${playlistId}...`); + 
logger.debug(`Spotify: Fetching playlist ${playlistId}...`); const playlistResponse = await axios.get( `https://api.spotify.com/v1/playlists/${playlistId}`, @@ -165,7 +166,7 @@ class SpotifyService { ); const playlist = playlistResponse.data; - console.log(`Spotify: Fetched playlist "${playlist.name}" with ${playlist.tracks?.items?.length || 0} tracks`); + logger.debug(`Spotify: Fetched playlist "${playlist.name}" with ${playlist.tracks?.items?.length || 0} tracks`); const tracks: SpotifyTrack[] = []; @@ -180,7 +181,7 @@ class SpotifyService { // Debug log for tracks with Unknown Album if (albumName === "Unknown Album") { - console.log(`Spotify: Track "${track.name}" has no album data:`, JSON.stringify({ + logger.debug(`Spotify: Track "${track.name}" has no album data:`, JSON.stringify({ trackId: track.id, album: track.album, hasAlbum: !!track.album, @@ -203,7 +204,7 @@ class SpotifyService { }); } - console.log(`Spotify: Processed ${tracks.length} tracks`); + logger.debug(`Spotify: Processed ${tracks.length} tracks`); return { id: playlist.id, @@ -216,7 +217,7 @@ class SpotifyService { isPublic: playlist.public ?? true, }; } catch (error: any) { - console.error("Spotify API error:", error.response?.status, error.response?.data || error.message); + logger.error("Spotify API error:", error.response?.status, error.response?.data || error.message); // Fallback to embed HTML parsing return await this.fetchPlaylistViaEmbedHtml(playlistId); @@ -228,7 +229,7 @@ class SpotifyService { */ private async fetchPlaylistViaEmbedHtml(playlistId: string): Promise { try { - console.log("Spotify: Trying embed HTML parsing..."); + logger.debug("Spotify: Trying embed HTML parsing..."); const response = await axios.get( `https://open.spotify.com/embed/playlist/${playlistId}`, @@ -244,7 +245,7 @@ class SpotifyService { const match = html.match(/