Release v1.3.0: Multi-source downloads, audio analyzer resilience, mobile improvements

Major Features:
- Multi-source download system (Soulseek/Lidarr with fallback)
- Configurable enrichment speed control (1-5x)
- Mobile touch drag support for seek sliders
- iOS PWA media controls (Control Center, Lock Screen)
- Artist name alias resolution via Last.fm
- Circuit breaker pattern for audio analysis

Critical Fixes:
- Audio analyzer stability (non-ASCII, BrokenProcessPool, OOM)
- Discovery system race conditions and import failures
- Radio decade categorization using originalYear
- LastFM API response normalization
- Mood bucket infinite loop prevention

Security:
- Bull Board admin authentication
- Lidarr webhook signature verification
- JWT token expiration and refresh
- Encryption key validation on startup

Closes #2, #6, #9, #13, #21, #26, #31, #34, #35, #37, #40, #43
This commit is contained in:
Your Name
2026-01-06 20:07:33 -06:00
parent 8fe151a0d1
commit cc8d0f6969
242 changed files with 20562 additions and 7725 deletions

View File

@@ -1,10 +1,38 @@
# Lidify Configuration
# Copy to .env and edit as needed
# ==============================================================================
# Database Configuration
# ==============================================================================
DATABASE_URL="postgresql://lidify:lidify@localhost:5433/lidify"
# ==============================================================================
# Redis Configuration
# ==============================================================================
# Note: Redis container port is mapped to 6380 to avoid conflicts with other Redis instances
REDIS_URL="redis://localhost:6380"
# ==============================================================================
# REQUIRED: Path to your music library
# ==============================================================================
MUSIC_PATH=/path/to/your/music
# DEVELOPMENT: Use your local path (e.g., /home/user/Music)
# DOCKER: This is the HOST path that gets mounted to /music in the container
# The backend inside Docker always uses /music, not this value.
# Example: MUSIC_PATH=~/Music (container mounts as ~/Music:/music)
# ==============================================================================
# REQUIRED: Security Keys
# ==============================================================================
# Encryption key for sensitive data (API keys, passwords, 2FA secrets)
# CRITICAL: You MUST set this before starting Lidify
# Generate with: openssl rand -base64 32
SETTINGS_ENCRYPTION_KEY=
# Session secret (auto-generated if not set)
# Generate with: openssl rand -base64 32
SESSION_SECRET=
# ==============================================================================
# OPTIONAL: Customize these if needed
@@ -16,9 +44,14 @@ PORT=3030
# Timezone (default: UTC)
TZ=UTC
# Session secret (auto-generated if not set)
# Generate with: openssl rand -base64 32
SESSION_SECRET=
# Logging level (default: debug in development, warn in production)
# Options: debug, info, warn, error, silent
LOG_LEVEL=debug
# Allow public access to API documentation in production (default: false)
# Set to 'true' to make /api/docs accessible without authentication in production
# Development mode always allows public access
# DOCS_PUBLIC=true
# DockerHub username (for pulling images)
# Your DockerHub username (same as GitHub: chevron7locked)
@@ -26,3 +59,13 @@ DOCKERHUB_USERNAME=chevron7locked
# Version tag (use 'latest' or specific like 'v1.0.0')
VERSION=latest
# ==============================================================================
# OPTIONAL: Audio Analyzer CPU Control
# ==============================================================================
# Audio Analyzer CPU Control
# AUDIO_ANALYSIS_WORKERS=2 # Number of parallel worker processes (1-8)
# AUDIO_ANALYSIS_THREADS_PER_WORKER=1 # Threads per worker for TensorFlow/FFT (1-4, default 1)
# Formula: max_cpu_usage ≈ WORKERS × (THREADS_PER_WORKER + 1) × 100%
# Example: 2 workers × (1 thread + 1 overhead) = ~400% CPU (4 cores)

102
.github/ISSUE_TEMPLATE/bug_report.yml vendored Normal file
View File

@@ -0,0 +1,102 @@
name: Bug Report
description: Report a bug or unexpected behavior
title: "[Bug]: "
labels: ["bug", "needs triage"]
body:
  - type: markdown
    attributes:
      value: |
        Thanks for taking the time to report a bug. Please fill out the information below to help us diagnose and fix the issue.
  - type: textarea
    id: description
    attributes:
      label: Bug Description
      description: A clear and concise description of what the bug is.
      placeholder: Describe the bug...
    validations:
      required: true
  - type: textarea
    id: reproduction
    attributes:
      label: Steps to Reproduce
      description: Step-by-step instructions to reproduce the behavior.
      placeholder: |
        1. Go to '...'
        2. Click on '...'
        3. Scroll down to '...'
        4. See error
    validations:
      required: true
  - type: textarea
    id: expected
    attributes:
      label: Expected Behavior
      description: What did you expect to happen?
      placeholder: Describe what should have happened...
    validations:
      required: true
  - type: textarea
    id: actual
    attributes:
      label: Actual Behavior
      description: What actually happened?
      placeholder: Describe what actually happened...
    validations:
      required: true
  - type: input
    id: version
    attributes:
      label: Lidify Version
      description: What version of Lidify are you running?
      placeholder: "e.g., v1.0.0, nightly-2024-01-15, or commit hash"
    validations:
      required: true
  - type: dropdown
    id: deployment
    attributes:
      label: Deployment Method
      description: How are you running Lidify?
      options:
        - Docker (docker-compose)
        - Docker (standalone)
        - Manual/Source
        - Other
    validations:
      required: true
  - type: textarea
    id: environment
    attributes:
      label: Environment Details
      description: Any relevant environment information (OS, browser, Docker version, etc.)
      placeholder: |
        - OS: Ubuntu 22.04
        - Docker: 24.0.5
        - Browser: Firefox 120
    validations:
      required: false
  - type: textarea
    id: logs
    attributes:
      label: Relevant Logs
      description: Please copy and paste any relevant log output. This will be automatically formatted into code.
      render: shell
    validations:
      required: false
  - type: checkboxes
    id: checklist
    attributes:
      label: Checklist
      options:
        - label: I have searched existing issues to ensure this bug hasn't already been reported
          required: true
        - label: I am using a supported version of Lidify
          required: true

5
.github/ISSUE_TEMPLATE/config.yml vendored Normal file
View File

@@ -0,0 +1,5 @@
blank_issues_enabled: false
contact_links:
  - name: Questions & Discussions
    url: https://github.com/Chevron7Locked/lidify/discussions
    about: Ask questions and discuss Lidify in GitHub Discussions

View File

@@ -0,0 +1,64 @@
name: Feature Request
description: Suggest a new feature or enhancement
title: "[Feature]: "
labels: ["enhancement", "needs triage"]
body:
  - type: markdown
    attributes:
      value: |
        Thanks for suggesting a feature! Please provide as much detail as possible.
  - type: textarea
    id: problem
    attributes:
      label: Problem or Use Case
      description: What problem does this feature solve? What are you trying to accomplish?
      placeholder: "I'm trying to... but currently..."
    validations:
      required: true
  - type: textarea
    id: solution
    attributes:
      label: Proposed Solution
      description: Describe the feature you'd like to see implemented.
      placeholder: Describe your ideal solution...
    validations:
      required: true
  - type: textarea
    id: alternatives
    attributes:
      label: Alternatives Considered
      description: Have you considered any alternative solutions or workarounds?
      placeholder: Describe alternatives you've considered...
    validations:
      required: false
  - type: dropdown
    id: scope
    attributes:
      label: Feature Scope
      description: How big of a change is this?
      options:
        - Small (UI tweak, minor enhancement)
        - Medium (new component, significant enhancement)
        - Large (new major feature, architectural change)
    validations:
      required: true
  - type: checkboxes
    id: contribution
    attributes:
      label: Contribution
      options:
        - label: I would be willing to help implement this feature
          required: false
  - type: checkboxes
    id: checklist
    attributes:
      label: Checklist
      options:
        - label: I have searched existing issues to ensure this hasn't already been requested
          required: true

37
.github/PULL_REQUEST_TEMPLATE.md vendored Normal file
View File

@@ -0,0 +1,37 @@
## Description
<!-- Briefly describe what this PR does -->
## Type of Change
- [ ] Bug fix (non-breaking change that fixes an issue)
- [ ] New feature (non-breaking change that adds functionality)
- [ ] Enhancement (improvement to existing functionality)
- [ ] Documentation update
- [ ] Code cleanup / refactoring
- [ ] Other (please describe):
## Related Issues
Fixes #
## Changes Made
-
-
-
## Testing Done
- [ ] Tested locally with Docker
- [ ] Tested specific functionality:
## Screenshots (if applicable)
## Checklist
- [ ] My code follows the project's code style
- [ ] I have tested my changes locally
- [ ] I have updated documentation if needed
- [ ] My changes don't introduce new warnings
- [ ] This PR targets the `main` branch

55
.github/workflows/docker-nightly.yml vendored Normal file
View File

@@ -0,0 +1,55 @@
name: Nightly Build
on:
  push:
    branches: [main]
    tags-ignore:
      - "v*" # Don't trigger on version tags - docker-publish handles those
env:
  IMAGE_NAME: ${{ secrets.DOCKERHUB_USERNAME }}/lidify
jobs:
  build-nightly:
    name: Build & Push Nightly Image
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
      - name: Free up disk space
        run: |
          sudo rm -rf /usr/share/dotnet
          sudo rm -rf /opt/ghc
          sudo rm -rf /usr/local/share/boost
          sudo rm -rf "$AGENT_TOOLSDIRECTORY"
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
      - name: Login to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Get short SHA
        id: sha
        run: echo "short=$(git rev-parse --short HEAD)" >> $GITHUB_OUTPUT
      - name: Build and push nightly
        uses: docker/build-push-action@v5
        with:
          context: .
          file: ./Dockerfile
          push: true
          tags: |
            ${{ env.IMAGE_NAME }}:nightly
            ${{ env.IMAGE_NAME }}:nightly-${{ steps.sha.outputs.short }}
          labels: |
            org.opencontainers.image.revision=${{ github.sha }}
            org.opencontainers.image.version=nightly-${{ steps.sha.outputs.short }}
          cache-from: type=gha
          cache-to: type=gha,mode=max
          # ARM64 disabled due to QEMU emulation issues with npm packages
          platforms: linux/amd64

48
.github/workflows/pr-checks.yml vendored Normal file
View File

@@ -0,0 +1,48 @@
name: PR Checks
on:
  pull_request:
    branches: [main]
    types: [opened, synchronize, reopened]
jobs:
  lint-frontend:
    name: Lint Frontend
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: "20"
          cache: "npm"
          cache-dependency-path: frontend/package-lock.json
      - name: Install frontend dependencies
        working-directory: frontend
        run: npm ci
      - name: Run ESLint on frontend
        working-directory: frontend
        run: npm run lint
  build-docker:
    name: Docker Build Check
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
      - name: Build Docker image (no push)
        uses: docker/build-push-action@v5
        with:
          context: .
          push: false
          tags: lidify:pr-check
          cache-from: type=gha
          cache-to: type=gha,mode=max

20
.gitignore vendored
View File

@@ -13,6 +13,7 @@
.env.test.local
.env.production.local
.env.local
.roomodes
# =============================================================================
# Dependencies
@@ -35,7 +36,7 @@ ENV/
**/.venv/
# =============================================================================
# Build Outputs
# Build
# =============================================================================
# Frontend (Next.js)
frontend/.next/
@@ -316,6 +317,17 @@ bower_components
reset-and-setup.sh
organize-singles.sh
# AI Context Management (keep locally, don't push to GitHub)
context_portal/
# Internal Development Documentation (keep locally, don't push to GitHub)
docs/
**/docs/
# Temporary commit messages
COMMIT_MESSAGE.txt
# Backend development logs
backend/logs/
@@ -349,6 +361,8 @@ soularr/
**/.cursor/
.vscode/
**/.vscode/
.roo/
**/.roo/
# =============================================================================
# Android Build Artifacts (contains local paths)
@@ -381,3 +395,7 @@ backend/mullvad/
# Android signing
lidify.keystore
keystore.b64
.aider*
issues/
plans/

203
CHANGELOG.md Normal file
View File

@@ -0,0 +1,203 @@
# Changelog
All notable changes to Lidify will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [1.3.0] - 2026-01-06
### Added
- Multi-source download system with configurable Soulseek/Lidarr primary source and fallback options
- Configurable enrichment speed control (1-5x concurrency) in Settings → Cache & Automation
- Stale job cleanup button in Settings to clear stuck Discovery batches and downloads
- Mobile touch drag support for seek sliders on all player views
- Skip ±30s buttons for audiobooks/podcasts on mobile players
- iOS PWA media controls support (Control Center and Lock Screen)
- Artist name alias resolution via Last.fm (e.g., "of mice" → "Of Mice & Men")
- Library grid now supports 8 columns on ultra-wide displays (2xl breakpoint)
- Artist discography sorting options (Year/Date Added)
- Enrichment failure notifications with retry/skip modal
- Download history deduplication to prevent duplicate entries
- Utility function for normalizing API responses to arrays (`normalizeToArray`) - @tombatossals
- Keyword-based mood scoring for standard analysis mode tracks - @RustyJonez
- Global and route-level error boundaries for better error handling
- React Strict Mode for development quality checks
- Next.js image optimization enabled by default
- Mobile-aware animation rendering (GalaxyBackground disables particles on mobile)
- Accessibility motion preferences support (`prefers-reduced-motion`)
- Lazy loading for heavy components (MoodMixer, VibeOverlay, MetadataEditor)
- Bundle analyzer tooling (`npm run analyze`)
- Loading states for all 10 priority routes
- Skip links for keyboard navigation (WCAG 2.1 AA compliance)
- ARIA attributes on all interactive controls and navigation elements
- Toast notifications with ARIA live regions for screen readers
- Bull Board admin dashboard authentication (requires admin user)
- Lidarr webhook signature verification with configurable secret
- Encryption key validation on startup (prevents insecure defaults)
- Session cookie security (httpOnly, sameSite=strict, secure in production)
- Swagger API documentation authentication in production
- JWT token expiration (24h access tokens, 30d refresh tokens)
- JWT refresh token endpoint (`/api/auth/refresh`)
- Token version validation (password changes invalidate existing tokens)
- Download queue reconciliation on server startup (marks stale jobs as failed)
- Redis batch operations for cache warmup (MULTI/EXEC pipelining)
- Memory-efficient database-level shuffle (`ORDER BY RANDOM() LIMIT n`)
- Dynamic import caching in queue cleaner (lazy-load pattern)
- Database index for `DownloadJob.targetMbid` field
- PWA install prompt dismissal persistence (7-day cooldown)
### Fixed
- **Critical:** Audio analyzer crashes on libraries with non-ASCII filenames ([#6](https://github.com/Chevron7Locked/lidify/issues/6))
- **Critical:** Audio analyzer BrokenProcessPool after ~1900 tracks ([#21](https://github.com/Chevron7Locked/lidify/issues/21))
- **Critical:** Audio analyzer OOM kills with aggressive worker auto-scaling ([#26](https://github.com/Chevron7Locked/lidify/issues/26))
- **Critical:** Audio analyzer model downloads and volume mount conflicts ([#2](https://github.com/Chevron7Locked/lidify/issues/2))
- Radio stations playing songs from wrong decades due to remaster dates ([#43](https://github.com/Chevron7Locked/lidify/issues/43))
- Manual metadata editing failing with 500 errors ([#9](https://github.com/Chevron7Locked/lidify/issues/9))
- Active downloads not resolving after Lidarr successfully imports ([#31](https://github.com/Chevron7Locked/lidify/issues/31))
- Discovery playlist downloads failing for artists with large catalogs ([#34](https://github.com/Chevron7Locked/lidify/issues/34))
- Discovery batches stuck in "downloading" status indefinitely
- Audio analyzer rhythm extraction failures on short/silent audio ([#13](https://github.com/Chevron7Locked/lidify/issues/13))
- "Of Mice & Men" artist name truncated to "Of Mice" during scanning
- Edition variant albums (Remastered, Deluxe) failing with "No releases available"
- Downloads stuck in "Lidarr #1" state for 5 minutes before failing
- Download duplicate prevention race condition causing 10+ duplicate jobs
- Lidarr downloads incorrectly cancelled during temporary network issues
- Discovery Weekly track durations showing "NaN:NaN"
- Artist name search ampersand handling ("Earth, Wind & Fire")
- Vibe overlay display issues on mobile devices
- Pagination scroll behavior (now scrolls to top instead of bottom)
- LastFM API crashes when receiving single objects instead of arrays ([#37](https://github.com/Chevron7Locked/lidify/issues/37)) - @tombatossals
- Mood bucket infinite loop for tracks analyzed in standard mode ([#40](https://github.com/Chevron7Locked/lidify/issues/40)) - @RustyJonez
- Playlist visibility toggle not properly syncing hide/show state - @tombatossals
- Audio player time display showing current time exceeding total duration (e.g., "58:00 / 54:34")
- Progress bar could exceed 100% for long-form media with stale metadata
- Enrichment P2025 errors when retrying enrichment for deleted entities
- Download settings fallback not resetting when changing primary source
- SeekSlider touch events bubbling to parent OverlayPlayer swipe handlers
- Audiobook/podcast position showing 0:00 after page refresh instead of saved progress
- Volume slider showing no visual fill indicator for current level
- PWA install prompt reappearing after user dismissal
### Changed
- Audio analyzer default workers reduced from auto-scale to 2 (memory conservative)
- Audio analyzer Docker memory limits: 6GB limit, 2GB reservation
- Download status polling intervals: 5s (active) / 10s (idle) / 30s (none), previously 15s
- Library pagination options changed to 24/40/80/200 (divisible by 8-column grid)
- Lidarr download failure detection now has 90-second grace period (3 checks)
- Lidarr catalog population timeout increased from 45s to 60s
- Download notifications now use API-driven state instead of local pending state
- Enrichment stop button now gracefully finishes current item before stopping
- Per-album enrichment triggers immediately instead of waiting for batch completion
- Lidarr edition variant detection now proactive (enables `anyReleaseOk` before first search)
- Discovery system now uses AcquisitionService for unified album/track acquisition
- Podcast and audiobook time display now shows time remaining instead of total duration
- Edition variant albums automatically fall back to base title search when edition-specific search fails
- Stale pending downloads cleaned up after 2 minutes (was indefinite)
- Download source detection now prioritizes actual service availability over user preference
### Removed
- Artist delete buttons hidden on mobile to prevent accidental deletion
- Audio analyzer models volume mount (shadowed built-in models)
### Database Migrations Required
```bash
# Run Prisma migrations
cd backend
npx prisma migrate deploy
```
**New Schema Fields:**
- `Album.originalYear` - Stores original release year (separate from remaster dates)
- `SystemSettings.enrichmentConcurrency` - User-configurable enrichment speed (1-5)
- `SystemSettings.downloadSource` - Primary download source selection
- `SystemSettings.primaryFailureFallback` - Fallback behavior on primary source failure
- `SystemSettings.lidarrWebhookSecret` - Shared secret for Lidarr webhook signature verification
- `User.tokenVersion` - Version number for JWT token invalidation on password change
- `DownloadJob.targetMbid` - Index added for improved query performance
**Backfill Script (Optional):**
```bash
# Backfill originalYear for existing albums
cd backend
npx ts-node scripts/backfill-original-year.ts
```
### Breaking Changes
- None - All changes are backward compatible
### Security
- **Critical:** Bull Board admin dashboard now requires authenticated admin user
- **Critical:** Lidarr webhooks verify signature/secret before processing requests
- **Critical:** Encryption key validation on startup prevents insecure defaults
- Session cookies use secure settings in production (httpOnly, sameSite=strict, secure)
- Swagger API documentation requires authentication in production (unless `DOCS_PUBLIC=true`)
- JWT tokens have proper expiration (24h access, 30d refresh) with refresh token support
- Password changes invalidate all existing tokens via tokenVersion increment
- Transaction-based download job creation prevents race conditions
- Enrichment stop control no longer bypassed by worker state
- Download queue webhook handlers use Serializable isolation transactions
- Webhook race conditions protected with exponential backoff retry logic
---
## Release Notes
When deploying this update:
1. **Backup your database** before running migrations
2. **Set required environment variable** (if not already set):
```bash
# Generate secure encryption key
SETTINGS_ENCRYPTION_KEY=$(openssl rand -base64 32)
```
3. Run `npx prisma migrate deploy` in the backend directory
4. Optionally run the originalYear backfill script for era mix accuracy:
```bash
cd backend
npx ts-node scripts/backfill-original-year.ts
```
5. Clear Docker volumes for audio-analyzer if experiencing model issues:
```bash
docker volume rm lidify_audio_analyzer_models 2>/dev/null || true
docker compose build audio-analyzer --no-cache
```
6. Review Settings → Downloads for new multi-source download options
7. Review Settings → Cache for new enrichment speed control
8. Configure Lidarr webhook secret in Settings for webhook signature verification (recommended)
9. Review Settings → Security for JWT token settings
### Known Issues
- Pre-existing TypeScript errors in spotifyImport.ts matchTrack method (unrelated to this release)
- Simon & Garfunkel artist name may be truncated due to short second part (edge case, not blocking)
### Contributors
Big thanks to everyone who contributed, tested, and helped make this release happen:
- @tombatossals - LastFM API normalization utility ([#39](https://github.com/Chevron7Locked/lidify/pull/39)), playlist visibility toggle fix ([#49](https://github.com/Chevron7Locked/lidify/pull/49))
- @RustyJonez - Mood bucket standard mode keyword scoring ([#47](https://github.com/Chevron7Locked/lidify/pull/47))
- @iamiq - Audio analyzer crash reporting ([#2](https://github.com/Chevron7Locked/lidify/issues/2))
- @volcs0 - Memory pressure testing ([#26](https://github.com/Chevron7Locked/lidify/issues/26))
- @Osiriz - Long-running analysis testing ([#21](https://github.com/Chevron7Locked/lidify/issues/21))
- @hessonam - Non-ASCII character testing ([#6](https://github.com/Chevron7Locked/lidify/issues/6))
- @niles - RhythmExtractor edge case reporting ([#13](https://github.com/Chevron7Locked/lidify/issues/13))
- @TheChrisK - Metadata editor bug reporting ([#9](https://github.com/Chevron7Locked/lidify/issues/9))
- @lizar93 - Discovery playlist testing ([#34](https://github.com/Chevron7Locked/lidify/issues/34))
- @brokenglasszero - Mood tags feature verification ([#35](https://github.com/Chevron7Locked/lidify/issues/35))
And all users who reported bugs, tested fixes, and provided feedback!
---
For detailed technical implementation notes, see [docs/PENDING_DEPLOY.md](docs/PENDING_DEPLOY.md).

63
CONTRIBUTING.md Normal file
View File

@@ -0,0 +1,63 @@
# Contributing to Lidify
First off, thanks for taking the time to contribute! 🎉
## Getting Started
1. Fork the repository
2. Clone your fork locally
3. Set up the development environment (see README.md)
4. Create a new branch from `main` for your changes
## Branch Strategy
All development happens on the `main` branch:
- **All PRs should target `main`**
- Every push to `main` triggers a nightly Docker build
- Stable releases are created via version tags
## Making Contributions
### Bug Fixes
1. Check existing issues to see if the bug has been reported
2. If not, open a bug report issue first
3. Fork, branch, fix, and submit a PR referencing the issue
### Small Enhancements
1. Open a feature request issue to discuss first
2. Keep changes focused and minimal
### Large Features
Please open an issue to discuss before starting work.
## Code Style
### Frontend
The frontend uses ESLint. Before submitting a PR:
```bash
cd frontend
npm run lint
```
### Backend
Follow existing code patterns and TypeScript conventions.
## Pull Request Process
1. **Target the `main` branch**
2. Fill out the PR template completely
3. Ensure the Docker build check passes
4. Wait for review - we'll provide feedback or approve
## Questions?
Open a Discussion thread for questions that aren't bugs or feature requests.
Thanks for contributing!

View File

@@ -48,35 +48,73 @@ RUN pip3 install --no-cache-dir --break-system-packages \
psycopg2-binary
# Download Essentia ML models (~200MB total) - these enable Enhanced vibe matching
# IMPORTANT: Using MusiCNN models to match analyzer.py expectations
RUN echo "Downloading Essentia ML models for Enhanced vibe matching..." && \
# Base embedding model (required for all predictions)
curl -L --progress-bar -o /app/models/discogs-effnet-bs64-1.pb \
"https://essentia.upf.edu/models/feature-extractors/discogs-effnet/discogs-effnet-bs64-1.pb" && \
# Mood models
curl -L --progress-bar -o /app/models/mood_happy-discogs-effnet-1.pb \
"https://essentia.upf.edu/models/classification-heads/mood_happy/mood_happy-discogs-effnet-1.pb" && \
curl -L --progress-bar -o /app/models/mood_sad-discogs-effnet-1.pb \
"https://essentia.upf.edu/models/classification-heads/mood_sad/mood_sad-discogs-effnet-1.pb" && \
curl -L --progress-bar -o /app/models/mood_relaxed-discogs-effnet-1.pb \
"https://essentia.upf.edu/models/classification-heads/mood_relaxed/mood_relaxed-discogs-effnet-1.pb" && \
curl -L --progress-bar -o /app/models/mood_aggressive-discogs-effnet-1.pb \
"https://essentia.upf.edu/models/classification-heads/mood_aggressive/mood_aggressive-discogs-effnet-1.pb" && \
# Arousal and Valence (key for vibe matching)
curl -L --progress-bar -o /app/models/mood_arousal-discogs-effnet-1.pb \
"https://essentia.upf.edu/models/classification-heads/mood_arousal/mood_arousal-discogs-effnet-1.pb" && \
curl -L --progress-bar -o /app/models/mood_valence-discogs-effnet-1.pb \
"https://essentia.upf.edu/models/classification-heads/mood_valence/mood_valence-discogs-effnet-1.pb" && \
# Danceability and Voice/Instrumental
curl -L --progress-bar -o /app/models/danceability-discogs-effnet-1.pb \
"https://essentia.upf.edu/models/classification-heads/danceability/danceability-discogs-effnet-1.pb" && \
curl -L --progress-bar -o /app/models/voice_instrumental-discogs-effnet-1.pb \
"https://essentia.upf.edu/models/classification-heads/voice_instrumental/voice_instrumental-discogs-effnet-1.pb" && \
# Base MusiCNN embedding model (required for all predictions)
curl -L --progress-bar -o /app/models/msd-musicnn-1.pb \
"https://essentia.upf.edu/models/autotagging/msd/msd-musicnn-1.pb" && \
# Mood classification heads (using MusiCNN architecture)
curl -L --progress-bar -o /app/models/mood_happy-msd-musicnn-1.pb \
"https://essentia.upf.edu/models/classification-heads/mood_happy/mood_happy-msd-musicnn-1.pb" && \
curl -L --progress-bar -o /app/models/mood_sad-msd-musicnn-1.pb \
"https://essentia.upf.edu/models/classification-heads/mood_sad/mood_sad-msd-musicnn-1.pb" && \
curl -L --progress-bar -o /app/models/mood_relaxed-msd-musicnn-1.pb \
"https://essentia.upf.edu/models/classification-heads/mood_relaxed/mood_relaxed-msd-musicnn-1.pb" && \
curl -L --progress-bar -o /app/models/mood_aggressive-msd-musicnn-1.pb \
"https://essentia.upf.edu/models/classification-heads/mood_aggressive/mood_aggressive-msd-musicnn-1.pb" && \
curl -L --progress-bar -o /app/models/mood_party-msd-musicnn-1.pb \
"https://essentia.upf.edu/models/classification-heads/mood_party/mood_party-msd-musicnn-1.pb" && \
curl -L --progress-bar -o /app/models/mood_acoustic-msd-musicnn-1.pb \
"https://essentia.upf.edu/models/classification-heads/mood_acoustic/mood_acoustic-msd-musicnn-1.pb" && \
curl -L --progress-bar -o /app/models/mood_electronic-msd-musicnn-1.pb \
"https://essentia.upf.edu/models/classification-heads/mood_electronic/mood_electronic-msd-musicnn-1.pb" && \
# Other classification heads
curl -L --progress-bar -o /app/models/danceability-msd-musicnn-1.pb \
"https://essentia.upf.edu/models/classification-heads/danceability/danceability-msd-musicnn-1.pb" && \
curl -L --progress-bar -o /app/models/voice_instrumental-msd-musicnn-1.pb \
"https://essentia.upf.edu/models/classification-heads/voice_instrumental/voice_instrumental-msd-musicnn-1.pb" && \
echo "ML models downloaded successfully" && \
ls -lh /app/models/
# Copy audio analyzer script
COPY services/audio-analyzer/analyzer.py /app/audio-analyzer/
# Create database readiness check script
RUN cat > /app/wait-for-db.sh << 'EOF'
#!/bin/bash
TIMEOUT=${1:-120}
COUNTER=0
echo "[wait-for-db] Waiting for database schema (timeout: ${TIMEOUT}s)..."
# Quick check for schema ready flag
if [ -f /data/.schema_ready ]; then
echo "[wait-for-db] Schema ready flag found, verifying connection..."
fi
while [ $COUNTER -lt $TIMEOUT ]; do
if PGPASSWORD=lidify psql -h localhost -U lidify -d lidify -c "SELECT 1 FROM \"Track\" LIMIT 1" > /dev/null 2>&1; then
echo "[wait-for-db] ✓ Database is ready and schema exists!"
exit 0
fi
if [ $((COUNTER % 15)) -eq 0 ]; then
echo "[wait-for-db] Still waiting... (${COUNTER}s elapsed)"
fi
sleep 1
COUNTER=$((COUNTER + 1))
done
echo "[wait-for-db] ERROR: Database schema not ready after ${TIMEOUT}s"
echo "[wait-for-db] Listing available tables:"
PGPASSWORD=lidify psql -h localhost -U lidify -d lidify -c "\dt" 2>&1 || echo "Could not list tables"
exit 1
EOF
RUN chmod +x /app/wait-for-db.sh && \
sed -i 's/\r$//' /app/wait-for-db.sh
# ============================================
# BACKEND BUILD
# ============================================
@@ -164,9 +202,11 @@ stderr_logfile_maxbytes=0
priority=20
[program:backend]
command=/bin/bash -c "sleep 5 && cd /app/backend && npx tsx src/index.ts"
command=/bin/bash -c "/app/wait-for-db.sh 120 && cd /app/backend && npx tsx src/index.ts"
autostart=true
autorestart=true
autorestart=unexpected
startretries=3
startsecs=10
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile=/dev/stderr
@@ -186,9 +226,11 @@ environment=NODE_ENV="production",BACKEND_URL="http://localhost:3006",PORT="3030
priority=40
[program:audio-analyzer]
command=/bin/bash -c "sleep 15 && cd /app/audio-analyzer && python3 analyzer.py"
command=/bin/bash -c "/app/wait-for-db.sh 120 && cd /app/audio-analyzer && python3 analyzer.py"
autostart=true
autorestart=true
autorestart=unexpected
startretries=3
startsecs=10
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile=/dev/stderr
@@ -271,32 +313,53 @@ MIGRATIONS_EXIST=$(gosu postgres psql -d lidify -tAc "SELECT EXISTS (SELECT FROM
# Check if User table exists (indicates existing data)
USER_TABLE_EXIST=$(gosu postgres psql -d lidify -tAc "SELECT EXISTS (SELECT FROM information_schema.tables WHERE table_schema = 'public' AND table_name = 'User')" 2>/dev/null || echo "f")
# Handle rename migration for existing databases
echo "Checking if rename migration needs to be marked as applied..."
if gosu postgres psql -d lidify -tAc "SELECT EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name='SystemSettings' AND column_name='soulseekFallback');" 2>/dev/null | grep -q 't'; then
echo "Old column exists, marking migration as applied..."
gosu postgres psql -d lidify -c "INSERT INTO \"_prisma_migrations\" (id, checksum, finished_at, migration_name, logs, rolled_back_at, started_at, applied_steps_count) VALUES (gen_random_uuid(), '', NOW(), '20250101000000_rename_soulseek_fallback', '', NULL, NOW(), 1) ON CONFLICT DO NOTHING;" 2>/dev/null || true
fi
if [ "$MIGRATIONS_EXIST" = "t" ]; then
# Normal migration flow - migrations table exists
echo "Migration history found, running migrate deploy..."
npx prisma migrate deploy 2>&1 || {
echo "WARNING: Migration failed, but database preserved."
echo "You may need to manually resolve migration issues."
}
if ! npx prisma migrate deploy 2>&1; then
echo "FATAL: Database migration failed! Check logs above."
exit 1
fi
elif [ "$USER_TABLE_EXIST" = "t" ]; then
# Database has data but no migrations table - needs baseline
echo "Existing database detected without migration history."
echo "Creating baseline from current schema..."
# Mark the init migration as already applied (baseline)
npx prisma migrate resolve --applied 20251130000000_init 2>&1 || true
npx prisma migrate resolve --applied 20241130000000_init 2>&1 || true
# Now run any subsequent migrations
npx prisma migrate deploy 2>&1 || {
echo "WARNING: Migration after baseline failed."
echo "Database preserved - check migration status manually."
}
if ! npx prisma migrate deploy 2>&1; then
echo "FATAL: Migration after baseline failed!"
exit 1
fi
else
# Fresh database - run migrations normally
echo "Fresh database detected, running initial migrations..."
npx prisma migrate deploy 2>&1 || {
echo "WARNING: Initial migration failed."
echo "Check database connection and schema."
}
if ! npx prisma migrate deploy 2>&1; then
echo "FATAL: Initial migration failed. Check database connection and schema."
exit 1
fi
fi
echo "✓ Migrations completed successfully"
# Verify schema exists before starting services
echo "Verifying database schema..."
if ! gosu postgres psql -d lidify -c "SELECT 1 FROM \"Track\" LIMIT 1" >/dev/null 2>&1; then
echo "FATAL: Track table does not exist after migration!"
echo "Database schema verification failed. Container will exit."
exit 1
fi
echo "✓ Schema verification passed"
# Create flag file for wait-for-db.sh
touch /data/.schema_ready
echo "✓ Schema ready flag created"
# Stop PostgreSQL (supervisord will start it)
gosu postgres $PG_BIN/pg_ctl -D /data/postgres -w stop
@@ -338,7 +401,12 @@ SETTINGS_ENCRYPTION_KEY=$SETTINGS_ENCRYPTION_KEY
ENVEOF
echo "Starting Lidify..."
exec /usr/bin/supervisord -c /etc/supervisor/supervisord.conf
exec env \
NODE_ENV=production \
DATABASE_URL="postgresql://lidify:lidify@localhost:5432/lidify" \
SESSION_SECRET="$SESSION_SECRET" \
SETTINGS_ENCRYPTION_KEY="$SETTINGS_ENCRYPTION_KEY" \
/usr/bin/supervisord -c /etc/supervisor/supervisord.conf
EOF
# Fix Windows line endings (CRLF -> LF) and make executable

View File

@@ -276,6 +276,34 @@ Lidify will begin scanning your music library automatically. Depending on the si
---
## Release Channels
Lidify offers two release channels to match your stability preferences:
### 🟢 Stable (Recommended)
Production-ready releases. Updated when new stable versions are released.
```bash
docker pull chevron7locked/lidify:latest
# or specific version
docker pull chevron7locked/lidify:v1.2.0
```
### 🔴 Nightly (Development)
Latest development build. Built on every push to main.
⚠️ **Not recommended for production** - may be unstable or broken.
```bash
docker pull chevron7locked/lidify:nightly
```
**For contributors:** See [`CONTRIBUTING.md`](CONTRIBUTING.md) for information on submitting pull requests and contributing to Lidify.
---
## Configuration
### Environment Variables
@@ -287,8 +315,7 @@ The unified Lidify container handles most configuration automatically. Here are
| `SESSION_SECRET` | Auto-generated | Session encryption key (recommended to set for persistence across restarts) |
| `TZ` | `UTC` | Timezone for the container |
| `LIDIFY_CALLBACK_URL` | `http://host.docker.internal:3030` | URL for Lidarr webhook callbacks (see [Lidarr integration](#lidarr)) |
| `NUM_WORKERS` | Auto-generated (50% of CPU Cores) | Limit the amount of workers being used in Auto Analysis. |
| `NUM_WORKERS` | `2` | Number of parallel workers for audio analysis |
The music library path is configured via Docker volume mount (`-v /path/to/music:/music`).
@@ -314,42 +341,40 @@ ALLOWED_ORIGINS=http://localhost:3030,https://lidify.yourdomain.com
Lidify uses several sensitive environment variables. Never commit your `.env` file.
| Variable | Purpose | Required |
| ------------------------- | ------------------------------ | ------------------ |
| `SESSION_SECRET` | Session encryption (32+ chars) | Yes |
| `SETTINGS_ENCRYPTION_KEY` | Encrypts stored credentials | Recommended |
| `SOULSEEK_USERNAME` | Soulseek login | If u sing Soulseek |
| `SOULSEEK_PASSWORD`- | Soulseek password - | If using S-oulseek |
| `LIDARR_AP I_KEY` | Lidarr integration | If using L idarr |
| `OPENAI_API_KEY` | AI features | Optional |
| `LASTFM_API_KEY ` | Artist recommendations | Optional |
| `FANART_API_KEY` | Artist images | Optional |
| Variable | Purpose | Required |
| ------------------------- | ------------------------------ | ----------------- |
| `SESSION_SECRET` | Session encryption (32+ chars) | Yes |
| `SETTINGS_ENCRYPTION_KEY` | Encrypts stored credentials | Recommended |
| `SOULSEEK_USERNAME` | Soulseek login | If using Soulseek |
| `SOULSEEK_PASSWORD` | Soulseek password | If using Soulseek |
| `LIDARR_API_KEY` | Lidarr integration | If using Lidarr |
| `OPENAI_API_KEY` | AI features | Optional |
| `LASTFM_API_KEY` | Artist recommendations | Optional |
| `FANART_API_KEY` | Artist images | Optional |
### VPN Configurati on (Optional)
### VPN Configuration (Optional)
If using Mullvad VPN for Soulseek:
- Place Wi reGuard config in `ba ckend/mullvad/` (gitignored)
- Never commit VPN cred entials or private keys
- The `*.conf` and `key.txt` patterns are already in .git ignore
- Place WireGuard config in `backend/mullvad/` (gitignored)
- Never commit VPN credentials or private keys
- The `*.conf` and `key.txt` patterns are already in .gitignore
### Generating Secrets
```bas h
```bash
# Generate a secure session secret
openss l rand - base64 32
openssl rand -base64 32
# Generate encryption key
openssl rand -hex 32
```
### Network
Sec urity
### Network Security
- Lidify is designed for self-hosted LAN use
- For exte rnal access, use a reverse proxy with HTTPS
- C o nfigure `ALLOWED_ORIGINS` for your domain
- For external access, use a reverse proxy with HTTPS
- Configure `ALLOWED_ORIGINS` for your domain
---
@@ -359,12 +384,12 @@ Lidify works beautifully on its own, but it becomes even more powerful when conn
### Lidarr
Connect Lidify to your Lidarr instance to request and downloa d new music directly from the app.
Connect Lidify to your Lidarr instance to request and download new music directly from the app.
**What you get:**
- Browse artists and albums you don't own
- Request downloads with a single click
- Request downloads with a single click
- Discover Weekly playlists that automatically download new recommendations
- Automatic library sync when Lidarr finishes importing

View File

@@ -1,12 +1,12 @@
{
"name": "lidify-backend",
"version": "1.0.0",
"version": "1.3.0",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "lidify-backend",
"version": "1.0.0",
"version": "1.3.0",
"license": "GPL-3.0",
"dependencies": {
"@bull-board/api": "^6.14.2",
@@ -37,6 +37,7 @@
"jsonwebtoken": "^9.0.2",
"music-metadata": "^11.10.0",
"node-cron": "^4.2.1",
"p-limit": "^7.2.0",
"p-queue": "^9.0.0",
"podcast-index-api": "^1.1.10",
"qrcode": "^1.5.4",
@@ -51,6 +52,7 @@
},
"devDependencies": {
"@types/bcrypt": "^5.0.2",
"@types/cors": "^2.8.19",
"@types/express": "^4.17.21",
"@types/express-session": "^1.17.10",
"@types/jsonwebtoken": "^9.0.10",
@@ -105,9 +107,9 @@
}
},
"node_modules/@borewit/text-codec": {
"version": "0.2.0",
"resolved": "https://registry.npmjs.org/@borewit/text-codec/-/text-codec-0.2.0.tgz",
"integrity": "sha512-X999CKBxGwX8wW+4gFibsbiNdwqmdQEXmUejIWaIqdrHBgS5ARIOOeyiQbHjP9G58xVEPcuvP6VwwH3A0OFTOA==",
"version": "0.2.1",
"resolved": "https://registry.npmjs.org/@borewit/text-codec/-/text-codec-0.2.1.tgz",
"integrity": "sha512-k7vvKPbf7J2fZ5klGRD9AeKfUvojuZIQ3BT5u7Jfv+puwXkUBUT5PVyMDfJZpy30CBDXGMgw7fguK/lpOMBvgw==",
"license": "MIT",
"funding": {
"type": "github",
@@ -115,25 +117,25 @@
}
},
"node_modules/@bull-board/api": {
"version": "6.15.0",
"resolved": "https://registry.npmjs.org/@bull-board/api/-/api-6.15.0.tgz",
"integrity": "sha512-z8qLZ4uv83hZNu+0YnHzhVoWv1grULuYh80FdC2xXLg8M1EwsOZD9cJ5CNpgBFqHb+NVByTmf5FltIvXdOU8tQ==",
"version": "6.16.2",
"resolved": "https://registry.npmjs.org/@bull-board/api/-/api-6.16.2.tgz",
"integrity": "sha512-d3kDf91FeMw/wYp8FOZJjX4hVqZEmomXtYgNRdZc0a5gTR2bmomvpwJtNBinu2lyIRFoX/Rxilz+CZ6xyw3drQ==",
"license": "MIT",
"dependencies": {
"redis-info": "^3.1.0"
},
"peerDependencies": {
"@bull-board/ui": "6.15.0"
"@bull-board/ui": "6.16.2"
}
},
"node_modules/@bull-board/express": {
"version": "6.15.0",
"resolved": "https://registry.npmjs.org/@bull-board/express/-/express-6.15.0.tgz",
"integrity": "sha512-c/nnxr5evLNgqoSSEvTwPb+6WaTB3PN3Bq2oMTBtwCUJlZr+s1UX7gx0wVIYHjeZyUdYR7fX7hhh2cRLO5vqeg==",
"version": "6.16.2",
"resolved": "https://registry.npmjs.org/@bull-board/express/-/express-6.16.2.tgz",
"integrity": "sha512-RYjWmRpixgoRVJf4/iZuwbst4EML8EnL+S2vyIn6uE0iqCXFBV63oEYJAhoEA7P50IrrktVBOU2/qTdsbih18g==",
"license": "MIT",
"dependencies": {
"@bull-board/api": "6.15.0",
"@bull-board/ui": "6.15.0",
"@bull-board/api": "6.16.2",
"@bull-board/ui": "6.16.2",
"ejs": "^3.1.10",
"express": "^5.2.0"
}
@@ -430,12 +432,12 @@
}
},
"node_modules/@bull-board/ui": {
"version": "6.15.0",
"resolved": "https://registry.npmjs.org/@bull-board/ui/-/ui-6.15.0.tgz",
"integrity": "sha512-bb/j6VMq2cfPoE/ZiUO7AcYTL0IjtxvKxkYV0zu+i1pc+JEv3ct4BItCII57knJR/YjZKGmdfr079KJFvzXC5A==",
"version": "6.16.2",
"resolved": "https://registry.npmjs.org/@bull-board/ui/-/ui-6.16.2.tgz",
"integrity": "sha512-L8ylgyJqiCrngne9GvX6zqALXnSLhzGBRaPnmO5y7Ev6K9w84EkcfhzcNw4qNH4SJAdcOm3HVf15dBU2Wznbug==",
"license": "MIT",
"dependencies": {
"@bull-board/api": "6.15.0"
"@bull-board/api": "6.16.2"
}
},
"node_modules/@derhuerst/http-basic": {
@@ -454,9 +456,9 @@
}
},
"node_modules/@emnapi/runtime": {
"version": "1.7.1",
"resolved": "https://registry.npmjs.org/@emnapi/runtime/-/runtime-1.7.1.tgz",
"integrity": "sha512-PVtJr5CmLwYAU9PZDMITZoR5iAOShYREoR45EyyLrbntV50mdePTgUn4AmOw90Ifcj+x2kRjdzr1HP3RrNiHGA==",
"version": "1.8.1",
"resolved": "https://registry.npmjs.org/@emnapi/runtime/-/runtime-1.8.1.tgz",
"integrity": "sha512-mehfKSMWjjNol8659Z8KxEMrdSJDDot5SXMq00dM8BN4o+CLNXQ0xH2V7EchNHV4RmbZLmmPdEaXZc5H2FXmDg==",
"license": "MIT",
"optional": true,
"dependencies": {
@@ -1497,9 +1499,9 @@
}
},
"node_modules/@ioredis/commands": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/@ioredis/commands/-/commands-1.4.0.tgz",
"integrity": "sha512-aFT2yemJJo+TZCmieA7qnYGQooOS7QfNmYrzGtsYd3g9j5iDP8AimYYAesf79ohjbLG12XxC4nG5DyEnC88AsQ==",
"version": "1.5.0",
"resolved": "https://registry.npmjs.org/@ioredis/commands/-/commands-1.5.0.tgz",
"integrity": "sha512-eUgLqrMf8nJkZxT24JvVRrQya1vZkQh8BBeYNwGDqa5I0VUi8ACx7uFvAaLxintokpTenkK6DASvo/bvNbBGow==",
"license": "MIT"
},
"node_modules/@jsdevtools/ono": {
@@ -1861,6 +1863,16 @@
"@types/node": "*"
}
},
"node_modules/@types/cors": {
"version": "2.8.19",
"resolved": "https://registry.npmjs.org/@types/cors/-/cors-2.8.19.tgz",
"integrity": "sha512-mFNylyeyqN93lfe/9CSxOGREz8cpzAhH+E93xJ4xWQf62V8sQ/24reV2nyzUWM6H6Xji+GGHpkbLe7pVoUEskg==",
"dev": true,
"license": "MIT",
"dependencies": {
"@types/node": "*"
}
},
"node_modules/@types/express": {
"version": "4.17.25",
"resolved": "https://registry.npmjs.org/@types/express/-/express-4.17.25.tgz",
@@ -3018,9 +3030,9 @@
}
},
"node_modules/file-type": {
"version": "21.1.1",
"resolved": "https://registry.npmjs.org/file-type/-/file-type-21.1.1.tgz",
"integrity": "sha512-ifJXo8zUqbQ/bLbl9sFoqHNTNWbnPY1COImFfM6CCy7z+E+jC1eY9YfOKkx0fckIg+VljAy2/87T61fp0+eEkg==",
"version": "21.3.0",
"resolved": "https://registry.npmjs.org/file-type/-/file-type-21.3.0.tgz",
"integrity": "sha512-8kPJMIGz1Yt/aPEwOsrR97ZyZaD1Iqm8PClb1nYFclUCkBi0Ma5IsYNQzvSFS9ib51lWyIw5mIT9rWzI/xjpzA==",
"license": "MIT",
"dependencies": {
"@tokenizer/inflate": "^0.4.1",
@@ -3604,12 +3616,12 @@
"license": "ISC"
},
"node_modules/ioredis": {
"version": "5.8.2",
"resolved": "https://registry.npmjs.org/ioredis/-/ioredis-5.8.2.tgz",
"integrity": "sha512-C6uC+kleiIMmjViJINWk80sOQw5lEzse1ZmvD+S/s8p8CWapftSaC+kocGTx6xrbrJ4WmYQGC08ffHLr6ToR6Q==",
"version": "5.9.0",
"resolved": "https://registry.npmjs.org/ioredis/-/ioredis-5.9.0.tgz",
"integrity": "sha512-T3VieIilNumOJCXI9SDgo4NnF6sZkd6XcmPi6qWtw4xqbt8nNz/ZVNiIH1L9puMTSHZh1mUWA4xKa2nWPF4NwQ==",
"license": "MIT",
"dependencies": {
"@ioredis/commands": "1.4.0",
"@ioredis/commands": "1.5.0",
"cluster-key-slot": "^1.1.0",
"debug": "^4.3.4",
"denque": "^2.1.0",
@@ -4096,9 +4108,9 @@
}
},
"node_modules/music-metadata": {
"version": "11.10.3",
"resolved": "https://registry.npmjs.org/music-metadata/-/music-metadata-11.10.3.tgz",
"integrity": "sha512-j0g/x4cNNZW6I5gdcPAY+GFkJY9WHTpkFDMBJKQLxJQyvSfQbXm57fTE3haGFFuOzCgtsTd4Plwc49Sn9RacDQ==",
"version": "11.10.5",
"resolved": "https://registry.npmjs.org/music-metadata/-/music-metadata-11.10.5.tgz",
"integrity": "sha512-G0i86zpL7AARmZx8XEkHBVf7rJMQDFfGEFc1C83//rKHGuaK0gwxmNNeo9mjm4g07KUwoT0s0dW7g5QwZhi+qQ==",
"funding": [
{
"type": "github",
@@ -4111,14 +4123,14 @@
],
"license": "MIT",
"dependencies": {
"@borewit/text-codec": "^0.2.0",
"@borewit/text-codec": "^0.2.1",
"@tokenizer/token": "^0.3.0",
"content-type": "^1.0.5",
"debug": "^4.4.3",
"file-type": "^21.1.1",
"file-type": "^21.2.0",
"media-typer": "^1.1.0",
"strtok3": "^10.3.4",
"token-types": "^6.1.1",
"token-types": "^6.1.2",
"uint8array-extras": "^1.5.0"
},
"engines": {
@@ -4315,15 +4327,15 @@
}
},
"node_modules/p-limit": {
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz",
"integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==",
"version": "7.2.0",
"resolved": "https://registry.npmjs.org/p-limit/-/p-limit-7.2.0.tgz",
"integrity": "sha512-ATHLtwoTNDloHRFFxFJdHnG6n2WUeFjaR8XQMFdKIv0xkXjrER8/iG9iu265jOM95zXHAfv9oTkqhrfbIzosrQ==",
"license": "MIT",
"dependencies": {
"p-try": "^2.0.0"
"yocto-queue": "^1.2.1"
},
"engines": {
"node": ">=6"
"node": ">=20"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
@@ -4341,10 +4353,25 @@
"node": ">=8"
}
},
"node_modules/p-locate/node_modules/p-limit": {
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz",
"integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==",
"license": "MIT",
"dependencies": {
"p-try": "^2.0.0"
},
"engines": {
"node": ">=6"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/p-queue": {
"version": "9.0.1",
"resolved": "https://registry.npmjs.org/p-queue/-/p-queue-9.0.1.tgz",
"integrity": "sha512-RhBdVhSwJb7Ocn3e8ULk4NMwBEuOxe+1zcgphUy9c2e5aR/xbEsdVXxHJ3lynw6Qiqu7OINEyHlZkiblEpaq7w==",
"version": "9.1.0",
"resolved": "https://registry.npmjs.org/p-queue/-/p-queue-9.1.0.tgz",
"integrity": "sha512-O/ZPaXuQV29uSLbxWBGGZO1mCQXV2BLIwUr59JUU9SoH76mnYvtms7aafH/isNSNGwuEfP6W/4xD0/TJXxrizw==",
"license": "MIT",
"dependencies": {
"eventemitter3": "^5.0.1",
@@ -4516,9 +4543,9 @@
}
},
"node_modules/qs": {
"version": "6.14.0",
"resolved": "https://registry.npmjs.org/qs/-/qs-6.14.0.tgz",
"integrity": "sha512-YWWTjgABSKcvs/nWBi9PycY/JiPJqOD4JA6o9Sej2AtvSGarXxKC3OQSk4pAarbdQlKAh5D4FCQkJNkW+GAn3w==",
"version": "6.14.1",
"resolved": "https://registry.npmjs.org/qs/-/qs-6.14.1.tgz",
"integrity": "sha512-4EK3+xJl8Ts67nLYNwqw/dsFVnCf+qR7RgXSK9jEEm9unao3njwMDdmsdvoKBKHzxd7tCYz5e5M+SnMjdtXGQQ==",
"license": "BSD-3-Clause",
"dependencies": {
"side-channel": "^1.1.0"
@@ -5211,12 +5238,12 @@
}
},
"node_modules/token-types": {
"version": "6.1.1",
"resolved": "https://registry.npmjs.org/token-types/-/token-types-6.1.1.tgz",
"integrity": "sha512-kh9LVIWH5CnL63Ipf0jhlBIy0UsrMj/NJDfpsy1SqOXlLKEVyXXYrnFxFT1yOOYVGBSApeVnjPw/sBz5BfEjAQ==",
"version": "6.1.2",
"resolved": "https://registry.npmjs.org/token-types/-/token-types-6.1.2.tgz",
"integrity": "sha512-dRXchy+C0IgK8WPC6xvCHFRIWYUbqqdEIKPaKo/AcTUNzwLTK6AH7RjdLWsEZcAN/TBdtfUw3PYEgPr5VPr6ww==",
"license": "MIT",
"dependencies": {
"@borewit/text-codec": "^0.1.0",
"@borewit/text-codec": "^0.2.1",
"@tokenizer/token": "^0.3.0",
"ieee754": "^1.2.1"
},
@@ -5228,16 +5255,6 @@
"url": "https://github.com/sponsors/Borewit"
}
},
"node_modules/token-types/node_modules/@borewit/text-codec": {
"version": "0.1.1",
"resolved": "https://registry.npmjs.org/@borewit/text-codec/-/text-codec-0.1.1.tgz",
"integrity": "sha512-5L/uBxmjaCIX5h8Z+uu+kA9BQLkc/Wl06UGR5ajNRxu+/XjonB5i8JpgFMrPj3LXTCPA0pv8yxUvbUi+QthGGA==",
"license": "MIT",
"funding": {
"type": "github",
"url": "https://github.com/sponsors/Borewit"
}
},
"node_modules/tr46": {
"version": "0.0.3",
"resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz",
@@ -5535,6 +5552,18 @@
"node": ">=6"
}
},
"node_modules/yocto-queue": {
"version": "1.2.2",
"resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-1.2.2.tgz",
"integrity": "sha512-4LCcse/U2MHZ63HAJVE+v71o7yOdIe4cZ70Wpf8D/IyjDKYQLV5GD46B+hSTjJsvV5PztjvHoU580EftxjDZFQ==",
"license": "MIT",
"engines": {
"node": ">=12.20"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/z-schema": {
"version": "5.0.5",
"resolved": "https://registry.npmjs.org/z-schema/-/z-schema-5.0.5.tgz",

View File

@@ -1,6 +1,6 @@
{
"name": "lidify-backend",
"version": "1.2.0",
"version": "1.3.0",
"description": "Lidify backend API server",
"license": "GPL-3.0",
"repository": {
@@ -46,6 +46,7 @@
"jsonwebtoken": "^9.0.2",
"music-metadata": "^11.10.0",
"node-cron": "^4.2.1",
"p-limit": "^7.2.0",
"p-queue": "^9.0.0",
"podcast-index-api": "^1.1.10",
"qrcode": "^1.5.4",
@@ -60,6 +61,7 @@
},
"devDependencies": {
"@types/bcrypt": "^5.0.2",
"@types/cors": "^2.8.19",
"@types/express": "^4.17.21",
"@types/express-session": "^1.17.10",
"@types/jsonwebtoken": "^9.0.10",

View File

@@ -0,0 +1,10 @@
-- Rename soulseekFallback to primaryFailureFallback (idempotent)
DO $$
BEGIN
IF EXISTS (
SELECT 1 FROM information_schema.columns
WHERE table_name = 'SystemSettings' AND column_name = 'soulseekFallback'
) THEN
ALTER TABLE "SystemSettings" RENAME COLUMN "soulseekFallback" TO "primaryFailureFallback";
END IF;
END $$;

View File

@@ -0,0 +1,11 @@
-- Add tokenVersion to User table (idempotent)
DO $$
BEGIN
IF EXISTS (SELECT 1 FROM information_schema.tables WHERE table_name = 'User')
AND NOT EXISTS (
SELECT 1 FROM information_schema.columns
WHERE table_name = 'User' AND column_name = 'tokenVersion'
) THEN
ALTER TABLE "User" ADD COLUMN "tokenVersion" INTEGER NOT NULL DEFAULT 0;
END IF;
END $$;

View File

@@ -0,0 +1,11 @@
-- Create targetMbid index on DownloadJob (idempotent)
DO $$
BEGIN
IF EXISTS (SELECT 1 FROM information_schema.tables WHERE table_name = 'DownloadJob')
AND NOT EXISTS (
SELECT 1 FROM pg_indexes
WHERE tablename = 'DownloadJob' AND indexname = 'DownloadJob_targetMbid_idx'
) THEN
CREATE INDEX "DownloadJob_targetMbid_idx" ON "DownloadJob"("targetMbid");
END IF;
END $$;

View File

@@ -19,6 +19,7 @@ CREATE TABLE "User" (
"twoFactorSecret" TEXT,
"twoFactorRecoveryCodes" TEXT,
"moodMixParams" JSONB,
"tokenVersion" INTEGER NOT NULL DEFAULT 0,
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
CONSTRAINT "User_pkey" PRIMARY KEY ("id")
@@ -78,7 +79,7 @@ CREATE TABLE "SystemSettings" (
"downloadRetryAttempts" INTEGER NOT NULL DEFAULT 3,
"transcodeCacheMaxGb" INTEGER NOT NULL DEFAULT 10,
"downloadSource" TEXT NOT NULL DEFAULT 'soulseek',
"soulseekFallback" TEXT NOT NULL DEFAULT 'none',
"primaryFailureFallback" TEXT NOT NULL DEFAULT 'none',
"updatedAt" TIMESTAMP(3) NOT NULL,
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
@@ -826,6 +827,9 @@ CREATE INDEX "DownloadJob_lidarrRef_idx" ON "DownloadJob"("lidarrRef");
-- CreateIndex
CREATE INDEX "DownloadJob_artistMbid_idx" ON "DownloadJob"("artistMbid");
-- CreateIndex
CREATE INDEX "DownloadJob_targetMbid_idx" ON "DownloadJob"("targetMbid");
-- CreateIndex
CREATE INDEX "ListeningState_userId_idx" ON "ListeningState"("userId");

View File

@@ -0,0 +1,2 @@
-- AlterTable
ALTER TABLE "SystemSettings" ADD COLUMN "enrichmentConcurrency" INTEGER NOT NULL DEFAULT 1;

View File

@@ -0,0 +1,27 @@
-- AlterTable
ALTER TABLE "Album" ADD COLUMN "displayTitle" TEXT,
ADD COLUMN "displayYear" INTEGER,
ADD COLUMN "hasUserOverrides" BOOLEAN NOT NULL DEFAULT false,
ADD COLUMN "userCoverUrl" TEXT,
ADD COLUMN "userGenres" JSONB;
-- AlterTable
ALTER TABLE "Artist" ADD COLUMN "displayName" TEXT,
ADD COLUMN "hasUserOverrides" BOOLEAN NOT NULL DEFAULT false,
ADD COLUMN "userGenres" JSONB,
ADD COLUMN "userHeroUrl" TEXT,
ADD COLUMN "userSummary" TEXT;
-- AlterTable
ALTER TABLE "Track" ADD COLUMN "displayTitle" TEXT,
ADD COLUMN "displayTrackNo" INTEGER,
ADD COLUMN "hasUserOverrides" BOOLEAN NOT NULL DEFAULT false;
-- CreateIndex
CREATE INDEX "Album_hasUserOverrides_idx" ON "Album"("hasUserOverrides");
-- CreateIndex
CREATE INDEX "Artist_hasUserOverrides_idx" ON "Artist"("hasUserOverrides");
-- CreateIndex
CREATE INDEX "Track_hasUserOverrides_idx" ON "Track"("hasUserOverrides");

View File

@@ -0,0 +1,128 @@
-- Migration: Add search vector triggers for podcasts and audiobooks
-- This migration creates PostgreSQL functions and triggers to automatically
-- populate and maintain search vectors for podcast and audiobook content
-- ============================================================================
-- PODCAST SEARCH VECTOR FUNCTION
-- ============================================================================
-- Function to generate Podcast search vector from title, author, and description
CREATE OR REPLACE FUNCTION podcast_search_vector_trigger() RETURNS trigger AS $$
BEGIN
-- Combine title, author, and description into search vector
-- Using setweight: title (A), author (B), description (C)
NEW."searchVector" :=
setweight(to_tsvector('english', COALESCE(NEW.title, '')), 'A') ||
setweight(to_tsvector('english', COALESCE(NEW.author, '')), 'B') ||
setweight(to_tsvector('english', COALESCE(NEW.description, '')), 'C');
RETURN NEW;
END
$$ LANGUAGE plpgsql;
-- Create trigger to auto-update Podcast search vector
DROP TRIGGER IF EXISTS podcast_search_vector_update ON "Podcast";
CREATE TRIGGER podcast_search_vector_update
BEFORE INSERT OR UPDATE OF title, author, description
ON "Podcast"
FOR EACH ROW
EXECUTE FUNCTION podcast_search_vector_trigger();
-- ============================================================================
-- PODCAST EPISODE SEARCH VECTOR FUNCTION
-- ============================================================================
-- Function to generate PodcastEpisode search vector from title and description
CREATE OR REPLACE FUNCTION podcast_episode_search_vector_trigger() RETURNS trigger AS $$
BEGIN
-- Combine title and description into search vector
-- Using setweight: title (A), description (B)
NEW."searchVector" :=
setweight(to_tsvector('english', COALESCE(NEW.title, '')), 'A') ||
setweight(to_tsvector('english', COALESCE(NEW.description, '')), 'B');
RETURN NEW;
END
$$ LANGUAGE plpgsql;
-- Create trigger to auto-update PodcastEpisode search vector
DROP TRIGGER IF EXISTS podcast_episode_search_vector_update ON "PodcastEpisode";
CREATE TRIGGER podcast_episode_search_vector_update
BEFORE INSERT OR UPDATE OF title, description
ON "PodcastEpisode"
FOR EACH ROW
EXECUTE FUNCTION podcast_episode_search_vector_trigger();
-- ============================================================================
-- AUDIOBOOK SEARCH VECTOR FUNCTION
-- ============================================================================
-- Function to generate Audiobook search vector from title, author, narrator, series, and description
CREATE OR REPLACE FUNCTION audiobook_search_vector_trigger() RETURNS trigger AS $$
BEGIN
-- Combine title, author/narrator/series, and description into search vector
-- Using setweight: title (A), author/narrator/series (B), description (C)
NEW."searchVector" :=
setweight(to_tsvector('english', COALESCE(NEW.title, '')), 'A') ||
setweight(to_tsvector('english', COALESCE(NEW.author, '')), 'B') ||
setweight(to_tsvector('english', COALESCE(NEW.narrator, '')), 'B') ||
setweight(to_tsvector('english', COALESCE(NEW.series, '')), 'B') ||
setweight(to_tsvector('english', COALESCE(NEW.description, '')), 'C');
RETURN NEW;
END
$$ LANGUAGE plpgsql;
-- Create trigger to auto-update Audiobook search vector
DROP TRIGGER IF EXISTS audiobook_search_vector_update ON "Audiobook";
CREATE TRIGGER audiobook_search_vector_update
BEFORE INSERT OR UPDATE OF title, author, narrator, series, description
ON "Audiobook"
FOR EACH ROW
EXECUTE FUNCTION audiobook_search_vector_trigger();
-- ============================================================================
-- ADD SEARCH VECTOR COLUMNS
-- ============================================================================
-- Add searchVector column to Podcast table
ALTER TABLE "Podcast" ADD COLUMN IF NOT EXISTS "searchVector" tsvector;
-- Add searchVector column to PodcastEpisode table
ALTER TABLE "PodcastEpisode" ADD COLUMN IF NOT EXISTS "searchVector" tsvector;
-- Add searchVector column to Audiobook table
ALTER TABLE "Audiobook" ADD COLUMN IF NOT EXISTS "searchVector" tsvector;
-- ============================================================================
-- CREATE GIN INDEXES
-- ============================================================================
-- Create GIN index on Podcast search vector
CREATE INDEX IF NOT EXISTS "Podcast_searchVector_idx" ON "Podcast" USING GIN ("searchVector");
-- Create GIN index on PodcastEpisode search vector
CREATE INDEX IF NOT EXISTS "PodcastEpisode_searchVector_idx" ON "PodcastEpisode" USING GIN ("searchVector");
-- Create GIN index on Audiobook search vector
CREATE INDEX IF NOT EXISTS "Audiobook_searchVector_idx" ON "Audiobook" USING GIN ("searchVector");
-- ============================================================================
-- POPULATE EXISTING RECORDS
-- ============================================================================
-- Update all existing Podcasts to populate their search vectors
UPDATE "Podcast"
SET "searchVector" =
setweight(to_tsvector('english', COALESCE(title, '')), 'A') ||
setweight(to_tsvector('english', COALESCE(author, '')), 'B') ||
setweight(to_tsvector('english', COALESCE(description, '')), 'C');
-- Update all existing PodcastEpisodes to populate their search vectors
UPDATE "PodcastEpisode"
SET "searchVector" =
setweight(to_tsvector('english', COALESCE(title, '')), 'A') ||
setweight(to_tsvector('english', COALESCE(description, '')), 'B');
-- Update all existing Audiobooks to populate their search vectors
UPDATE "Audiobook"
SET "searchVector" =
setweight(to_tsvector('english', COALESCE(title, '')), 'A') ||
setweight(to_tsvector('english', COALESCE(author, '')), 'B') ||
setweight(to_tsvector('english', COALESCE(narrator, '')), 'B') ||
setweight(to_tsvector('english', COALESCE(series, '')), 'B') ||
setweight(to_tsvector('english', COALESCE(description, '')), 'C');

View File

@@ -0,0 +1,32 @@
-- CreateTable
CREATE TABLE "EnrichmentFailure" (
"id" TEXT NOT NULL,
"entityType" TEXT NOT NULL,
"entityId" TEXT NOT NULL,
"entityName" TEXT,
"errorMessage" TEXT,
"errorCode" TEXT,
"retryCount" INTEGER NOT NULL DEFAULT 0,
"maxRetries" INTEGER NOT NULL DEFAULT 3,
"firstFailedAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"lastFailedAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"skipped" BOOLEAN NOT NULL DEFAULT false,
"skippedAt" TIMESTAMP(3),
"resolved" BOOLEAN NOT NULL DEFAULT false,
"resolvedAt" TIMESTAMP(3),
"metadata" JSONB,
CONSTRAINT "EnrichmentFailure_pkey" PRIMARY KEY ("id")
);
-- CreateIndex
CREATE INDEX "EnrichmentFailure_entityType_resolved_idx" ON "EnrichmentFailure"("entityType", "resolved");
-- CreateIndex
CREATE INDEX "EnrichmentFailure_skipped_idx" ON "EnrichmentFailure"("skipped");
-- CreateIndex
CREATE INDEX "EnrichmentFailure_lastFailedAt_idx" ON "EnrichmentFailure"("lastFailedAt");
-- CreateIndex
CREATE UNIQUE INDEX "EnrichmentFailure_entityType_entityId_key" ON "EnrichmentFailure"("entityType", "entityId");

View File

@@ -0,0 +1,2 @@
-- AlterTable
ALTER TABLE "Album" ADD COLUMN "originalYear" INTEGER;

View File

@@ -0,0 +1,2 @@
-- AlterTable
ALTER TABLE "SystemSettings" ADD COLUMN "lidarrWebhookSecret" TEXT;

View File

@@ -0,0 +1,2 @@
-- AlterTable
ALTER TABLE "Track" ADD COLUMN "analysisStartedAt" TIMESTAMP(3);

View File

@@ -0,0 +1,2 @@
-- AlterTable
ALTER TABLE "SystemSettings" ADD COLUMN "audioAnalyzerWorkers" INTEGER NOT NULL DEFAULT 2;

View File

@@ -0,0 +1,2 @@
-- AlterTable
ALTER TABLE "SystemSettings" ADD COLUMN "lastfmApiKey" TEXT;

View File

@@ -0,0 +1,2 @@
-- AlterTable
ALTER TABLE "SystemSettings" ADD COLUMN "soulseekConcurrentDownloads" INTEGER NOT NULL DEFAULT 4;

View File

@@ -18,6 +18,7 @@ model User {
twoFactorSecret String? // TOTP secret (encrypted)
twoFactorRecoveryCodes String? // Recovery codes (encrypted, comma-separated hashed codes)
moodMixParams Json? // Saved mood mix parameters for "Your Mood Mix"
tokenVersion Int @default(0) // Incremented on password change to invalidate tokens
createdAt DateTime @default(now())
plays Play[]
@@ -77,9 +78,10 @@ model SystemSettings {
// === Download Services ===
// Lidarr
lidarrEnabled Boolean @default(true)
lidarrUrl String? @default("http://localhost:8686")
lidarrApiKey String? // Encrypted
lidarrEnabled Boolean @default(true)
lidarrUrl String? @default("http://localhost:8686")
lidarrApiKey String? // Encrypted
lidarrWebhookSecret String? // Encrypted - Shared secret for webhook verification
// === AI Services ===
// OpenAI (for future AI features)
@@ -92,6 +94,9 @@ model SystemSettings {
fanartEnabled Boolean @default(false)
fanartApiKey String? // Encrypted
// Last.fm (optional user override - app ships with default key)
lastfmApiKey String? // Encrypted
// === Media Services ===
// Audiobookshelf
audiobookshelfEnabled Boolean @default(false)
@@ -118,12 +123,15 @@ model SystemSettings {
maxConcurrentDownloads Int @default(3)
downloadRetryAttempts Int @default(3)
transcodeCacheMaxGb Int @default(10) // Transcode cache size limit in GB
enrichmentConcurrency Int @default(1) // 1-5, number of parallel enrichment workers
audioAnalyzerWorkers Int @default(2) // 1-8, number of parallel audio analysis workers
soulseekConcurrentDownloads Int @default(4) // 1-10, concurrent Soulseek downloads
// === Download Preferences ===
// Primary download source: "soulseek" (per-track) or "lidarr" (full albums)
downloadSource String @default("soulseek")
// When soulseek is primary and fails: "none" (skip) or "lidarr" (download full album)
soulseekFallback String @default("none")
downloadSource String @default("soulseek")
// Fallback when primary source fails: "none" (skip), "lidarr" (full album), or "soulseek" (track-based)
primaryFailureFallback String @default("none")
updatedAt DateTime @updatedAt
createdAt DateTime @default(now())
@@ -143,6 +151,13 @@ model Artist {
enrichmentStatus String @default("pending") // pending, enriching, completed, failed
searchVector Unsupported("tsvector")?
// User overrides (optional, takes display precedence)
displayName String? // User-provided display name
userSummary String? @db.Text // User-provided bio
userHeroUrl String? // User-uploaded/linked image
userGenres Json? // User-modified genres (array of strings)
hasUserOverrides Boolean @default(false) // Quick check flag
albums Album[]
similarFrom SimilarArtist[] @relation("FromArtist")
similarTo SimilarArtist[] @relation("ToArtist")
@@ -151,6 +166,7 @@ model Artist {
@@index([name])
@@index([normalizedName])
@@index([searchVector], type: Gin)
@@index([hasUserOverrides])
}
model Album {
@@ -158,7 +174,8 @@ model Album {
rgMbid String @unique // release group MBID
artistId String
title String
year Int?
year Int? // File metadata date (may be remaster)
originalYear Int? // Original release date from MusicBrainz
coverUrl String?
primaryType String // Album, EP, Single, Live, Compilation
label String? // Record label (from MusicBrainz)
@@ -167,6 +184,13 @@ model Album {
location AlbumLocation @default(LIBRARY) // LIBRARY or DISCOVER
searchVector Unsupported("tsvector")?
// User overrides (optional, takes display precedence)
displayTitle String? // User-provided display title
displayYear Int? // User-provided year
userCoverUrl String? // User-uploaded/linked cover
userGenres Json? // User-modified genres (array of strings)
hasUserOverrides Boolean @default(false) // Quick check flag
artist Artist @relation(fields: [artistId], references: [id], onDelete: Cascade)
tracks Track[]
@@ -174,6 +198,7 @@ model Album {
@@index([location])
@@index([title])
@@index([searchVector], type: Gin)
@@index([hasUserOverrides])
}
model Track {
@@ -190,6 +215,11 @@ model Track {
fileModified DateTime // mtime for change detection
fileSize Int // File size in bytes
// User overrides (optional, takes display precedence)
displayTitle String? // User-provided display title
displayTrackNo Int? // User-provided track number
hasUserOverrides Boolean @default(false) // Quick check flag
// === Audio Analysis (Essentia) ===
// Rhythm
bpm Float? // Beats per minute (e.g., 120.5)
@@ -235,13 +265,14 @@ model Track {
lastfmTags String[] // ["chill", "workout", "sad", "90s"]
// Analysis Metadata
analysisStatus String @default("pending") // pending, processing, completed, failed
analysisVersion String? // Essentia version used
analysisMode String? // 'standard' or 'enhanced'
analyzedAt DateTime?
analysisError String? // Error message if failed
analysisRetryCount Int @default(0) // Number of retry attempts
updatedAt DateTime @updatedAt
analysisStatus String @default("pending") // pending, processing, completed, failed
analysisStartedAt DateTime? // When processing began (for timeout detection)
analysisVersion String? // Essentia version used
analysisMode String? // 'standard' or 'enhanced'
analyzedAt DateTime?
analysisError String? // Error message if failed
analysisRetryCount Int @default(0) // Number of retry attempts
updatedAt DateTime @updatedAt
album Album @relation(fields: [albumId], references: [id], onDelete: Cascade)
plays Play[]
@@ -272,6 +303,7 @@ model Track {
@@index([arousal])
@@index([acousticness])
@@index([instrumentalness])
@@index([hasUserOverrides])
}
// Transcoded file cache for audio streaming
@@ -479,6 +511,7 @@ model DownloadJob {
@@index([startedAt])
@@index([lidarrRef])
@@index([artistMbid])
@@index([targetMbid])
}
model ListeningState {
@@ -640,6 +673,9 @@ model Audiobook {
audioUrl String // Audiobookshelf streaming URL
libraryId String? // Audiobookshelf library ID
// Full-text search
searchVector Unsupported("tsvector")?
// Timestamps
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
@@ -649,6 +685,7 @@ model Audiobook {
@@index([author])
@@index([series])
@@index([lastSyncedAt])
@@index([searchVector], type: Gin)
}
model PodcastRecommendation {
@@ -676,46 +713,49 @@ model PodcastRecommendation {
// ============================================
model Podcast {
id String @id @default(cuid())
feedUrl String @unique
id String @id @default(cuid())
feedUrl String @unique
title String
author String?
description String? @db.Text
imageUrl String? // Original feed image URL
localCoverPath String? // Local cached cover image path
itunesId String? @unique
description String? @db.Text
imageUrl String? // Original feed image URL
localCoverPath String? // Local cached cover image path
itunesId String? @unique
language String?
explicit Boolean @default(false)
episodeCount Int @default(0)
lastRefreshed DateTime @default(now())
refreshInterval Int @default(3600) // seconds (1 hour default)
autoRefresh Boolean @default(true)
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
explicit Boolean @default(false)
episodeCount Int @default(0)
lastRefreshed DateTime @default(now())
refreshInterval Int @default(3600) // seconds (1 hour default)
autoRefresh Boolean @default(true)
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
searchVector Unsupported("tsvector")?
episodes PodcastEpisode[]
subscriptions PodcastSubscription[]
@@index([itunesId])
@@index([lastRefreshed])
@@index([searchVector], type: Gin)
}
model PodcastEpisode {
id String @id @default(cuid())
id String @id @default(cuid())
podcastId String
guid String // RSS GUID (unique per feed)
guid String // RSS GUID (unique per feed)
title String
description String? @db.Text
audioUrl String // Direct MP3/audio URL from RSS
duration Int @default(0) // seconds
description String? @db.Text
audioUrl String // Direct MP3/audio URL from RSS
duration Int @default(0) // seconds
publishedAt DateTime
episodeNumber Int?
season Int?
imageUrl String? // Episode-specific image URL
localCoverPath String? // Local cached episode cover
fileSize Int? // bytes
mimeType String? @default("audio/mpeg")
createdAt DateTime @default(now())
imageUrl String? // Episode-specific image URL
localCoverPath String? // Local cached episode cover
fileSize Int? // bytes
mimeType String? @default("audio/mpeg")
createdAt DateTime @default(now())
searchVector Unsupported("tsvector")?
podcast Podcast @relation(fields: [podcastId], references: [id], onDelete: Cascade)
progress PodcastProgress[]
@@ -723,6 +763,7 @@ model PodcastEpisode {
@@unique([podcastId, guid])
@@index([podcastId, publishedAt])
@@index([searchVector], type: Gin)
}
// User podcast subscriptions
@@ -976,3 +1017,30 @@ model Notification {
@@index([userId, read])
@@index([createdAt])
}
// ============================================
// Enrichment Failure Tracking
// ============================================
model EnrichmentFailure {
id String @id @default(cuid())
entityType String // artist, track, audio
entityId String // Artist/Track ID
entityName String? // Display name
errorMessage String? // Human-readable error
errorCode String? // Machine-readable code
retryCount Int @default(0)
maxRetries Int @default(3)
firstFailedAt DateTime @default(now())
lastFailedAt DateTime @default(now())
skipped Boolean @default(false)
skippedAt DateTime?
resolved Boolean @default(false)
resolvedAt DateTime?
metadata Json? // Additional context (filePath, etc.)
@@unique([entityType, entityId])
@@index([entityType, resolved])
@@index([skipped])
@@index([lastFailedAt])
}

View File

@@ -0,0 +1,146 @@
#!/usr/bin/env ts-node
/**
* Backfill Script: Populate originalYear for existing albums
*
* This script populates the new originalYear field for albums that don't have it yet.
*
* Strategy:
* 1. For albums already enriched with MusicBrainz data, copy year to originalYear
* (since enrichment overwrites year with the original release date)
* 2. Skip temporary albums (temp-* MBIDs)
*
* Usage:
* npx ts-node scripts/backfill-original-year.ts [--dry-run]
*
* Options:
* --dry-run Show what would be updated without making changes
*/
import { PrismaClient } from "@prisma/client";
const prisma = new PrismaClient();
/**
 * Copy `year` into `originalYear` for every enriched album that is missing it.
 *
 * Only albums with a non-null `year` and a real MusicBrainz release-group MBID
 * (i.e. not a "temp-" placeholder) are touched, since enrichment overwrites
 * `year` with the original release date. In dry-run mode the candidate list is
 * printed and no writes happen.
 *
 * @param dryRun When true, report what would change without touching the database.
 */
async function backfillOriginalYear(dryRun: boolean = false) {
  const modeLabel = dryRun
    ? "DRY RUN (no changes)"
    : "LIVE (will update database)";
  console.log("=== Backfill originalYear Script ===\n");
  console.log(`Mode: ${modeLabel}\n`);

  try {
    // Candidates: enriched albums (real MBID) with a year but no originalYear yet.
    const candidates = await prisma.album.findMany({
      where: {
        originalYear: null,
        year: { not: null }, // Only albums that have a year value
        rgMbid: { not: { startsWith: "temp-" } }, // Skip temporary albums
      },
      select: {
        id: true,
        rgMbid: true,
        title: true,
        year: true,
        originalYear: true,
        artist: { select: { name: true } },
      },
    });

    console.log(`Found ${candidates.length} albums to backfill\n`);

    if (candidates.length === 0) {
      console.log("✓ No albums need backfilling. All done!");
      return;
    }

    // Preview the first few candidates so the operator can sanity-check them.
    console.log("Sample of albums to be updated:");
    for (const [idx, album] of candidates.slice(0, 5).entries()) {
      console.log(` ${idx + 1}. "${album.title}" by ${album.artist.name}`);
      console.log(
        ` Current: year=${album.year}, originalYear=${album.originalYear}`
      );
      console.log(` Will set: originalYear=${album.year}\n`);
    }
    if (candidates.length > 5) {
      console.log(` ... and ${candidates.length - 5} more albums\n`);
    }

    if (dryRun) {
      console.log(
        "DRY RUN: No changes made. Remove --dry-run to apply updates."
      );
      return;
    }

    console.log(`Proceeding with backfill of ${candidates.length} albums...\n`);

    // Write in fixed-size batches so we never hold too many in-flight updates.
    const BATCH_SIZE = 100;
    let processed = 0;
    let updated = 0;
    for (let offset = 0; offset < candidates.length; offset += BATCH_SIZE) {
      const batch = candidates.slice(offset, offset + BATCH_SIZE);
      // Each album receives its own year value, so updates are issued per row.
      await Promise.all(
        batch.map((album) =>
          prisma.album.update({
            where: { id: album.id },
            data: { originalYear: album.year },
          })
        )
      );
      processed += batch.length;
      updated += batch.length;
      const pct = ((processed / candidates.length) * 100).toFixed(1);
      console.log(
        `Progress: ${processed}/${candidates.length} (${pct}%) albums updated`
      );
    }

    console.log(`\n✓ Backfill complete!`);
    console.log(` - Total albums updated: ${updated}`);
    console.log(` - Field populated: originalYear`);
    console.log(
      `\nNote: Future albums will have originalYear populated automatically during enrichment.`
    );
  } catch (error) {
    console.error("\n✗ Error during backfill:", error);
    throw error; // Re-throw so the CLI wrapper exits non-zero.
  } finally {
    // Always release the Prisma connection, success or failure.
    await prisma.$disconnect();
  }
}
// Entry point: parse CLI flags and run the backfill, mapping the outcome
// to a process exit code (0 = success, 1 = failure).
const cliArgs = process.argv.slice(2);
const isDryRun = cliArgs.includes("--dry-run");

void (async () => {
  try {
    await backfillOriginalYear(isDryRun);
    process.exit(0);
  } catch (error) {
    console.error(error);
    process.exit(1);
  }
})();

View File

@@ -1,6 +1,8 @@
import dotenv from "dotenv";
import { z } from "zod";
import * as fs from "fs";
import { validateMusicConfig, MusicConfig } from "./utils/configValidator";
import { logger } from "./utils/logger";
dotenv.config();
@@ -18,14 +20,14 @@ const envSchema = z.object({
try {
envSchema.parse(process.env);
console.log("Environment variables validated");
logger.debug("Environment variables validated");
} catch (error) {
if (error instanceof z.ZodError) {
console.error(" Environment validation failed:");
logger.error(" Environment validation failed:");
error.errors.forEach((err) => {
console.error(` - ${err.path.join(".")}: ${err.message}`);
logger.error(` - ${err.path.join(".")}: ${err.message}`);
});
console.error(
logger.error(
"\n Please check your .env file and ensure all required variables are set."
);
process.exit(1);
@@ -47,10 +49,10 @@ let musicConfig: MusicConfig = {
export async function initializeMusicConfig() {
try {
musicConfig = await validateMusicConfig();
console.log("Music configuration initialized");
logger.debug("Music configuration initialized");
} catch (err: any) {
console.error(" Configuration validation failed:", err.message);
console.warn(" Using default/environment configuration");
logger.error(" Configuration validation failed:", err.message);
logger.warn(" Using default/environment configuration");
// Don't exit process - allow app to start for other features
// Music features will fail gracefully if config is invalid
}
@@ -80,11 +82,9 @@ export const config = {
}
: undefined,
// Last.fm - ships with default app key, users can override in settings
// Last.fm - ships with default app key, user can optionally override
lastfm: {
// Default application API key (free tier, for public use)
// Users can override this in System Settings with their own key
apiKey: process.env.LASTFM_API_KEY || "c1797de6bf0b7e401b623118120cd9e1",
apiKey: process.env.LASTFM_API_KEY || "95fe0eaa9875db7bb8539b2c738b4dcd",
},
// OpenAI - reads from database

View File

@@ -6,6 +6,7 @@ import helmet from "helmet";
import { config } from "./config";
import { redisClient } from "./utils/redis";
import { prisma } from "./utils/db";
import { logger } from "./utils/logger";
import authRoutes from "./routes/auth";
import onboardingRoutes from "./routes/onboarding";
@@ -38,6 +39,7 @@ import analysisRoutes from "./routes/analysis";
import releasesRoutes from "./routes/releases";
import { dataCacheService } from "./services/dataCache";
import { errorHandler } from "./middleware/errorHandler";
import { requireAuth, requireAdmin } from "./middleware/auth";
import {
authLimiter,
apiLimiter,
@@ -80,7 +82,7 @@ app.use(
} else {
// For self-hosted: allow anyway but log it
// Users shouldn't have to configure CORS for their own app
console.log(
logger.debug(
`[CORS] Origin ${origin} not in allowlist, allowing anyway (self-hosted)`
);
callback(null, true);
@@ -111,10 +113,8 @@ app.use(
proxy: true, // Trust the reverse proxy
cookie: {
httpOnly: true,
// For self-hosted apps: allow HTTP access (common for LAN deployments)
// If behind HTTPS reverse proxy, the proxy should handle security
secure: false,
sameSite: "lax",
secure: process.env.NODE_ENV === "production",
sameSite: "strict",
maxAge: 1000 * 60 * 60 * 24 * 7, // 7 days
},
})
@@ -167,8 +167,15 @@ app.get("/api/health", (req, res) => {
});
// Swagger API Documentation
// In production: require auth unless DOCS_PUBLIC=true
// In development: always public for easier testing
const docsMiddleware = config.nodeEnv === "production" && process.env.DOCS_PUBLIC !== "true"
? [requireAuth]
: [];
app.use(
"/api/docs",
...docsMiddleware,
swaggerUi.serve,
swaggerUi.setup(swaggerSpec, {
customCss: ".swagger-ui .topbar { display: none }",
@@ -177,15 +184,60 @@ app.use(
);
// Serve raw OpenAPI spec
app.get("/api/docs.json", (req, res) => {
app.get("/api/docs.json", ...docsMiddleware, (req, res) => {
res.json(swaggerSpec);
});
// Error handler
app.use(errorHandler);
// Health check functions
async function checkPostgresConnection() {
try {
await prisma.$queryRaw`SELECT 1`;
logger.debug("✓ PostgreSQL connection verified");
} catch (error) {
logger.error("✗ PostgreSQL connection failed:", {
error: error instanceof Error ? error.message : String(error),
databaseUrl: config.databaseUrl?.replace(/:[^:@]+@/, ':***@') // Hide password
});
logger.error("Unable to connect to PostgreSQL. Please ensure:");
logger.error(" 1. PostgreSQL is running on the correct port (default: 5433)");
logger.error(" 2. DATABASE_URL in .env is correct");
logger.error(" 3. Database credentials are valid");
process.exit(1);
}
}
async function checkRedisConnection() {
try {
// Check if Redis client is actually connected
// The redis client has automatic reconnection, so we need to check status first
if (!redisClient.isReady) {
throw new Error("Redis client is not ready - connection failed or still connecting");
}
// If connected, verify with ping
await redisClient.ping();
logger.debug("✓ Redis connection verified");
} catch (error) {
logger.error("✗ Redis connection failed:", {
error: error instanceof Error ? error.message : String(error),
redisUrl: config.redisUrl?.replace(/:[^:@]+@/, ':***@') // Hide password if any
});
logger.error("Unable to connect to Redis. Please ensure:");
logger.error(" 1. Redis is running on the correct port (default: 6380)");
logger.error(" 2. REDIS_URL in .env is correct");
process.exit(1);
}
}
app.listen(config.port, "0.0.0.0", async () => {
console.log(
// Verify database connections before proceeding
await checkPostgresConnection();
await checkRedisConnection();
logger.debug(
`Lidify API running on port ${config.port} (accessible on all network interfaces)`
);
@@ -224,8 +276,8 @@ app.listen(config.port, "0.0.0.0", async () => {
serverAdapter,
});
app.use("/api/admin/queues", serverAdapter.getRouter());
console.log("Bull Board dashboard available at /api/admin/queues");
app.use("/api/admin/queues", requireAuth, requireAdmin, serverAdapter.getRouter());
logger.debug("Bull Board dashboard available at /api/admin/queues (admin-only)");
// Note: Native library scanning is now triggered manually via POST /library/scan
// No automatic sync on startup - user must manually scan their music folder
@@ -233,7 +285,7 @@ app.listen(config.port, "0.0.0.0", async () => {
// Enrichment worker enabled for OWNED content only
// - Background enrichment: Genres, MBIDs, similar artists for owned albums/artists
// - On-demand fetching: Artist images, bios when browsing (cached in Redis 7 days)
console.log(
logger.debug(
"Background enrichment enabled for owned content (genres, MBIDs, etc.)"
);
@@ -241,7 +293,7 @@ app.listen(config.port, "0.0.0.0", async () => {
// This populates Redis with existing artist images and album covers
// so first page loads are instant instead of waiting for cache population
dataCacheService.warmupCache().catch((err) => {
console.error("Cache warmup failed:", err);
logger.error("Cache warmup failed:", err);
});
// Podcast cache cleanup - runs daily to remove cached episodes older than 30 days
@@ -249,17 +301,62 @@ app.listen(config.port, "0.0.0.0", async () => {
// Run cleanup on startup (async, don't block)
cleanupExpiredCache().catch((err) => {
console.error("Podcast cache cleanup failed:", err);
logger.error("Podcast cache cleanup failed:", err);
});
// Schedule daily cleanup (every 24 hours)
const TWENTY_FOUR_HOURS = 24 * 60 * 60 * 1000;
setInterval(() => {
cleanupExpiredCache().catch((err) => {
console.error("Scheduled podcast cache cleanup failed:", err);
logger.error("Scheduled podcast cache cleanup failed:", err);
});
}, TWENTY_FOUR_HOURS);
console.log("Podcast cache cleanup scheduled (daily, 30-day expiry)");
logger.debug("Podcast cache cleanup scheduled (daily, 30-day expiry)");
// Auto-sync audiobooks on startup if cache is empty
// This prevents "disappeared" audiobooks after container rebuilds
(async () => {
try {
const { getSystemSettings } = await import("./utils/systemSettings");
const settings = await getSystemSettings();
// Only proceed if Audiobookshelf is configured and enabled
if (settings?.audiobookshelfEnabled && settings?.audiobookshelfUrl) {
// Check if cache is empty
const cachedCount = await prisma.audiobook.count();
if (cachedCount === 0) {
logger.debug(
"[STARTUP] Audiobook cache is empty - auto-syncing from Audiobookshelf..."
);
const { audiobookCacheService } = await import(
"./services/audiobookCache"
);
const result = await audiobookCacheService.syncAll();
logger.debug(
`[STARTUP] Audiobook auto-sync complete: ${result.synced} audiobooks cached`
);
} else {
logger.debug(
`[STARTUP] Audiobook cache has ${cachedCount} entries - skipping auto-sync`
);
}
}
} catch (err) {
logger.error("[STARTUP] Audiobook auto-sync failed:", err);
// Non-fatal - user can manually sync later
}
})();
// Reconcile download queue state with database
const { downloadQueueManager } = await import("./services/downloadQueue");
try {
const result = await downloadQueueManager.reconcileOnStartup();
logger.debug(`Download queue reconciled: ${result.loaded} active, ${result.failed} marked failed`);
} catch (err) {
logger.error("Download queue reconciliation failed:", err);
// Non-fatal - queue will start fresh
}
});
// Graceful shutdown handling
@@ -267,12 +364,12 @@ let isShuttingDown = false;
async function gracefulShutdown(signal: string) {
if (isShuttingDown) {
console.log("Shutdown already in progress...");
logger.debug("Shutdown already in progress...");
return;
}
isShuttingDown = true;
console.log(`\nReceived ${signal}. Starting graceful shutdown...`);
logger.debug(`\nReceived ${signal}. Starting graceful shutdown...`);
try {
// Shutdown workers (intervals, crons, queues)
@@ -280,17 +377,17 @@ async function gracefulShutdown(signal: string) {
await shutdownWorkers();
// Close Redis connection
console.log("Closing Redis connection...");
logger.debug("Closing Redis connection...");
await redisClient.quit();
// Close Prisma connection
console.log("Closing database connection...");
logger.debug("Closing database connection...");
await prisma.$disconnect();
console.log("Graceful shutdown complete");
logger.debug("Graceful shutdown complete");
process.exit(0);
} catch (error) {
console.error("Error during shutdown:", error);
logger.error("Error during shutdown:", error);
process.exit(1);
}
}

View File

@@ -1,4 +1,5 @@
import { prisma } from "../utils/db";
import { logger } from "../utils/logger";
import { getSystemSettings } from "../utils/systemSettings";
import {
cleanStuckDownloads,
@@ -14,19 +15,45 @@ class QueueCleanerService {
private maxEmptyChecks = 3; // Stop after 3 consecutive empty checks
private timeoutId?: NodeJS.Timeout;
// Cached dynamic imports (lazy-loaded once, reused on subsequent calls)
private discoverWeeklyService: typeof import("../services/discoverWeekly")["discoverWeeklyService"] | null = null;
private matchAlbum: typeof import("../utils/fuzzyMatch")["matchAlbum"] | null = null;
/**
* Get discoverWeeklyService (lazy-loaded and cached)
*/
private async getDiscoverWeeklyService() {
if (!this.discoverWeeklyService) {
const module = await import("../services/discoverWeekly");
this.discoverWeeklyService = module.discoverWeeklyService;
}
return this.discoverWeeklyService;
}
/**
* Get matchAlbum function (lazy-loaded and cached)
*/
private async getMatchAlbum() {
if (!this.matchAlbum) {
const module = await import("../utils/fuzzyMatch");
this.matchAlbum = module.matchAlbum;
}
return this.matchAlbum;
}
/**
* Start the polling loop
* Safe to call multiple times - won't create duplicate loops
*/
async start() {
if (this.isRunning) {
console.log(" Queue cleaner already running");
logger.debug(" Queue cleaner already running");
return;
}
this.isRunning = true;
this.emptyQueueChecks = 0;
console.log(" Queue cleaner started (checking every 30s)");
logger.debug(" Queue cleaner started (checking every 30s)");
await this.runCleanup();
}
@@ -40,7 +67,7 @@ class QueueCleanerService {
this.timeoutId = undefined;
}
this.isRunning = false;
console.log(" Queue cleaner stopped (queue empty)");
logger.debug(" Queue cleaner stopped (queue empty)");
}
/**
@@ -54,7 +81,7 @@ class QueueCleanerService {
const settings = await getSystemSettings();
if (!settings?.lidarrUrl || !settings?.lidarrApiKey) {
console.log(" Lidarr not configured, stopping queue cleaner");
logger.debug(" Lidarr not configured, stopping queue cleaner");
this.stop();
return;
}
@@ -63,7 +90,7 @@ class QueueCleanerService {
const staleCount =
await simpleDownloadManager.markStaleJobsAsFailed();
if (staleCount > 0) {
console.log(`⏰ Cleaned up ${staleCount} stale download(s)`);
logger.debug(`⏰ Cleaned up ${staleCount} stale download(s)`);
this.emptyQueueChecks = 0; // Reset counter
}
@@ -71,20 +98,37 @@ class QueueCleanerService {
const reconcileResult =
await simpleDownloadManager.reconcileWithLidarr();
if (reconcileResult.reconciled > 0) {
console.log(
logger.debug(
`✓ Reconciled ${reconcileResult.reconciled} job(s) with Lidarr`
);
this.emptyQueueChecks = 0; // Reset counter
}
// PART 0.26: Sync with Lidarr queue (detect cancelled downloads)
const queueSyncResult = await simpleDownloadManager.syncWithLidarrQueue();
if (queueSyncResult.cancelled > 0) {
logger.debug(
`✓ Synced ${queueSyncResult.cancelled} job(s) with Lidarr queue (cancelled/completed)`
);
this.emptyQueueChecks = 0; // Reset counter
}
// PART 0.3: Reconcile processing jobs with local library (critical fix for #31)
// Check if albums already exist in Lidify's database even if Lidarr webhooks were missed
const localReconcileResult = await this.reconcileWithLocalLibrary();
if (localReconcileResult.reconciled > 0) {
logger.debug(
`✓ Reconciled ${localReconcileResult.reconciled} job(s) with local library`
);
this.emptyQueueChecks = 0; // Reset counter
}
// PART 0.5: Check for stuck discovery batches (batch-level timeout)
const { discoverWeeklyService } = await import(
"../services/discoverWeekly"
);
const discoverWeeklyService = await this.getDiscoverWeeklyService();
const stuckBatchCount =
await discoverWeeklyService.checkStuckBatches();
if (stuckBatchCount > 0) {
console.log(
logger.debug(
`⏰ Force-completed ${stuckBatchCount} stuck discovery batch(es)`
);
this.emptyQueueChecks = 0; // Reset counter
@@ -97,7 +141,7 @@ class QueueCleanerService {
);
if (cleanResult.removed > 0) {
console.log(
logger.debug(
`[CLEANUP] Removed ${cleanResult.removed} stuck download(s) - searching for alternatives`
);
this.emptyQueueChecks = 0; // Reset counter - queue had activity
@@ -143,7 +187,7 @@ class QueueCleanerService {
},
});
console.log(
logger.debug(
` Updated job ${job.id}: retry ${
currentRetryCount + 1
}`
@@ -187,10 +231,10 @@ class QueueCleanerService {
const artistName =
download.artist?.name || "Unknown Artist";
const albumTitle = download.album?.title || "Unknown Album";
console.log(
logger.debug(
`Recovered orphaned job: ${artistName} - ${albumTitle}`
);
console.log(` Download ID: ${download.downloadId}`);
logger.debug(` Download ID: ${download.downloadId}`);
this.emptyQueueChecks = 0; // Reset counter - found work to do
recoveredCount += orphanedJobs.length;
@@ -219,11 +263,9 @@ class QueueCleanerService {
}
if (discoveryBatchIds.size > 0) {
const { discoverWeeklyService } = await import(
"../services/discoverWeekly"
);
const discoverWeeklyService = await this.getDiscoverWeeklyService();
for (const batchId of discoveryBatchIds) {
console.log(
logger.debug(
` Checking Discovery batch completion: ${batchId}`
);
await discoverWeeklyService.checkBatchCompletion(
@@ -238,7 +280,7 @@ class QueueCleanerService {
!j.discoveryBatchId
);
if (nonDiscoveryJobs.length > 0) {
console.log(
logger.debug(
` Triggering library scan for recovered job(s)...`
);
await scanQueue.add("scan", {
@@ -250,12 +292,12 @@ class QueueCleanerService {
}
if (recoveredCount > 0) {
console.log(`Recovered ${recoveredCount} orphaned job(s)`);
logger.debug(`Recovered ${recoveredCount} orphaned job(s)`);
}
// Only log skipped count occasionally to reduce noise
if (skippedCount > 0 && this.emptyQueueChecks === 0) {
console.log(
logger.debug(
` (Skipped ${skippedCount} incomplete download records)`
);
}
@@ -272,12 +314,12 @@ class QueueCleanerService {
if (!hadActivity) {
this.emptyQueueChecks++;
console.log(
logger.debug(
` Queue empty (${this.emptyQueueChecks}/${this.maxEmptyChecks})`
);
if (this.emptyQueueChecks >= this.maxEmptyChecks) {
console.log(
logger.debug(
` No activity for ${this.maxEmptyChecks} checks - stopping cleaner`
);
this.stop();
@@ -293,7 +335,7 @@ class QueueCleanerService {
this.checkInterval
);
} catch (error) {
console.error(" Queue cleanup error:", error);
logger.error(" Queue cleanup error:", error);
// Still schedule next check even on error
this.timeoutId = setTimeout(
() => this.runCleanup(),
@@ -302,6 +344,171 @@ class QueueCleanerService {
}
}
/**
* Reconcile processing jobs with local library (Phase 1 & 3 fix for #31)
* Checks if albums already exist in Lidify's database and marks matching jobs as complete
* This handles cases where:
* - Lidarr webhooks were missed
* - MBID mismatches between MusicBrainz and Lidarr
* - Album/artist name differences prevent webhook matching
*
* Phase 3 enhancement: Uses fuzzy matching to catch more name variations
*
* PUBLIC: Called by periodic reconciliation in workers/index.ts
*/
async reconcileWithLocalLibrary(): Promise<{ reconciled: number }> {
const processingJobs = await prisma.downloadJob.findMany({
where: { status: { in: ["pending", "processing"] } },
});
if (processingJobs.length === 0) {
return { reconciled: 0 };
}
logger.debug(
`[LOCAL-RECONCILE] Checking ${processingJobs.length} job(s) against local library...`
);
let reconciled = 0;
for (const job of processingJobs) {
const metadata = (job.metadata as any) || {};
const artistName = metadata?.artistName;
const albumTitle = metadata?.albumTitle;
if (!artistName || !albumTitle) {
continue;
}
try {
// First try: Exact/contains match (fast)
let localAlbum = await prisma.album.findFirst({
where: {
AND: [
{
artist: {
name: {
contains: artistName,
mode: "insensitive",
},
},
},
{
title: {
contains: albumTitle,
mode: "insensitive",
},
},
],
},
include: {
tracks: {
select: { id: true },
take: 1,
},
artist: {
select: { name: true },
},
},
});
// Second try: Fuzzy match if exact match failed (slower but more thorough)
if (!localAlbum || localAlbum.tracks.length === 0) {
const matchAlbum = await this.getMatchAlbum();
// Get all albums from artists with similar names
const candidateAlbums = await prisma.album.findMany({
where: {
artist: {
name: {
contains: artistName.substring(0, 5),
mode: "insensitive",
},
},
},
include: {
tracks: {
select: { id: true },
take: 1,
},
artist: {
select: { name: true },
},
},
take: 50, // Limit to prevent performance issues
});
// Find best fuzzy match
const fuzzyMatch = candidateAlbums.find(
(album) =>
album.tracks.length > 0 &&
matchAlbum(
artistName,
albumTitle,
album.artist.name,
album.title,
0.75
)
);
if (fuzzyMatch) {
localAlbum = fuzzyMatch;
}
if (localAlbum) {
logger.debug(
`[LOCAL-RECONCILE] Fuzzy matched "${artistName} - ${albumTitle}" to "${localAlbum.artist.name} - ${localAlbum.title}"`
);
}
}
if (localAlbum && localAlbum.tracks.length > 0) {
logger.debug(
`[LOCAL-RECONCILE] ✓ Found "${localAlbum.artist.name} - ${localAlbum.title}" in library for job ${job.id}`
);
// Album exists with tracks - mark job complete
await prisma.downloadJob.update({
where: { id: job.id },
data: {
status: "completed",
completedAt: new Date(),
error: null,
metadata: {
...metadata,
completedAt: new Date().toISOString(),
reconciledFromLocalLibrary: true,
},
},
});
reconciled++;
// Check batch completion for discovery jobs
if (job.discoveryBatchId) {
const discoverWeeklyService = await this.getDiscoverWeeklyService();
await discoverWeeklyService.checkBatchCompletion(
job.discoveryBatchId
);
}
}
} catch (error: any) {
logger.error(
`[LOCAL-RECONCILE] Error checking job ${job.id}:`,
error.message
);
}
}
if (reconciled > 0) {
logger.debug(
`[LOCAL-RECONCILE] Marked ${reconciled} job(s) complete from local library`
);
}
return { reconciled };
}
/**
* Get current status (for debugging/monitoring)
*/

View File

@@ -1,4 +1,5 @@
import { Request, Response, NextFunction } from "express";
import { logger } from "../utils/logger";
import { prisma } from "../utils/db";
import jwt from "jsonwebtoken";
@@ -11,6 +12,9 @@ if (!JWT_SECRET) {
);
}
// Type assertion after validation - JWT_SECRET is guaranteed to be a string
const JWT_SECRET_VALIDATED: string = JWT_SECRET;
declare global {
namespace Express {
interface Request {
@@ -23,91 +27,177 @@ declare global {
}
}
export interface AuthenticatedRequest extends Request {
user: {
id: string;
username: string;
role: string;
};
}
export interface JWTPayload {
userId: string;
username: string;
role: string;
tokenVersion?: number;
type?: string;
}
export function generateToken(user: { id: string; username: string; role: string }): string {
export function generateToken(user: {
id: string;
username: string;
role: string;
tokenVersion: number;
}): string {
return jwt.sign(
{ userId: user.id, username: user.username, role: user.role },
JWT_SECRET,
{
userId: user.id,
username: user.username,
role: user.role,
tokenVersion: user.tokenVersion
},
JWT_SECRET_VALIDATED,
{ expiresIn: "24h" }
);
}
/**
 * Create a long-lived refresh token for a user.
 *
 * The payload carries the user's current tokenVersion so the token can be
 * invalidated server-side (a version mismatch is rejected on verification),
 * plus a `type: "refresh"` marker distinguishing it from access tokens.
 *
 * @param user - Minimal user identity: id and current tokenVersion.
 * @returns Signed JWT valid for 30 days (access tokens last 24h).
 */
export function generateRefreshToken(user: {
  id: string;
  tokenVersion: number;
}): string {
  const payload = {
    userId: user.id,
    tokenVersion: user.tokenVersion,
    type: "refresh"
  };
  const options = { expiresIn: "30d" } as const;
  return jwt.sign(payload, JWT_SECRET_VALIDATED, options);
}
/**
 * Authenticate a request, trying each supported credential in order of
 * precedence:
 *   1. Server-side session (req.session.userId)
 *   2. API key in the "X-API-Key" header
 *   3. (only when checkQueryToken) JWT in the `?token=` query param,
 *      used by streaming URLs where headers cannot be set
 *   4. Bearer JWT in the Authorization header
 *
 * JWT paths validate the token's tokenVersion against the user's current
 * value so tokens issued before a password change are rejected.
 *
 * @param req Express request object
 * @param checkQueryToken Whether to check for token in query params (for streaming)
 * @returns User object if authenticated, null otherwise
 */
async function authenticateRequest(
  req: Request,
  checkQueryToken: boolean = false
): Promise<{ id: string; username: string; role: string } | null> {
  // 1) Session-based auth: a session userId must still resolve to a real
  // user row; DB errors are logged and fall through to the next method.
  if (req.session?.userId) {
    try {
      const user = await prisma.user.findUnique({
        where: { id: req.session.userId },
        select: { id: true, username: true, role: true },
      });
      if (user) return user;
    } catch (error) {
      logger.error("Session auth error:", error);
    }
  }
  // 2) API key in X-API-Key header (mobile / external clients).
  const apiKey = req.headers["x-api-key"] as string;
  if (apiKey) {
    try {
      const apiKeyRecord = await prisma.apiKey.findUnique({
        where: { key: apiKey },
        include: {
          user: { select: { id: true, username: true, role: true } },
        },
      });
      if (apiKeyRecord && apiKeyRecord.user) {
        // Update last used timestamp (async, don't block); failures are
        // deliberately ignored — lastUsed is best-effort bookkeeping.
        prisma.apiKey
          .update({
            where: { id: apiKeyRecord.id },
            data: { lastUsed: new Date() },
          })
          .catch(() => {});
        return apiKeyRecord.user;
      }
    } catch (error) {
      logger.error("API key auth error:", error);
    }
  }
  // 3) JWT in the ?token= query param (streaming URLs only).
  if (checkQueryToken) {
    const tokenParam = req.query.token as string;
    if (tokenParam) {
      try {
        const decoded = jwt.verify(
          tokenParam,
          JWT_SECRET_VALIDATED
        ) as unknown as JWTPayload;
        const user = await prisma.user.findUnique({
          where: { id: decoded.userId },
          select: { id: true, username: true, role: true, tokenVersion: true },
        });
        if (user) {
          // Validate tokenVersion - reject if password was changed.
          // NOTE(review): a stale version returns null immediately here,
          // so a valid Bearer header is NOT consulted afterwards — confirm
          // this short-circuit is intentional.
          if (decoded.tokenVersion === undefined || decoded.tokenVersion !== user.tokenVersion) {
            return null;
          }
          return { id: user.id, username: user.username, role: user.role };
        }
      } catch (error) {
        // Token invalid (bad signature/expired) — try other methods.
      }
    }
  }
  // 4) Bearer JWT in the Authorization header.
  const authHeader = req.headers.authorization;
  const token = authHeader?.startsWith("Bearer ")
    ? authHeader.substring(7)
    : null;
  if (token) {
    try {
      const decoded = jwt.verify(token, JWT_SECRET_VALIDATED) as unknown as JWTPayload;
      const user = await prisma.user.findUnique({
        where: { id: decoded.userId },
        select: { id: true, username: true, role: true, tokenVersion: true },
      });
      if (user) {
        // Validate tokenVersion - reject if password was changed
        if (decoded.tokenVersion === undefined || decoded.tokenVersion !== user.tokenVersion) {
          return null;
        }
        return { id: user.id, username: user.username, role: user.role };
      }
    } catch (error) {
      // Token invalid — fall through to the unauthenticated result.
    }
  }
  // No credential accepted.
  return null;
}
export async function requireAuth(
req: Request,
res: Response,
next: NextFunction
) {
// First, check session-based auth (primary method)
if (req.session?.userId) {
try {
const user = await prisma.user.findUnique({
where: { id: req.session.userId },
select: { id: true, username: true, role: true },
});
if (user) {
req.user = user;
return next();
}
} catch (error) {
console.error("Session auth error:", error);
}
const user = await authenticateRequest(req, false);
if (user) {
req.user = user;
return next();
}
// Check for API key in X-API-Key header (for mobile/external apps)
const apiKey = req.headers["x-api-key"] as string;
if (apiKey) {
try {
const apiKeyRecord = await prisma.apiKey.findUnique({
where: { key: apiKey },
include: { user: { select: { id: true, username: true, role: true } } },
});
if (apiKeyRecord && apiKeyRecord.user) {
// Update last used timestamp (async, don't block)
prisma.apiKey.update({
where: { id: apiKeyRecord.id },
data: { lastUsed: new Date() },
}).catch(() => {}); // Ignore errors on lastUsed update
req.user = apiKeyRecord.user;
return next();
}
} catch (error) {
console.error("API key auth error:", error);
}
}
// Fallback: check JWT token in Authorization header
const authHeader = req.headers.authorization;
const token = authHeader?.startsWith("Bearer ") ? authHeader.substring(7) : null;
if (token) {
try {
const decoded = jwt.verify(token, JWT_SECRET) as JWTPayload;
const user = await prisma.user.findUnique({
where: { id: decoded.userId },
select: { id: true, username: true, role: true },
});
if (user) {
req.user = user;
return next();
}
} catch (error) {
// Token invalid, continue to error
}
}
return res.status(401).json({ error: "Not authenticated" });
}
export async function requireAdmin(req: Request, res: Response, next: NextFunction) {
export async function requireAdmin(
req: Request,
res: Response,
next: NextFunction
) {
if (!req.user || req.user.role !== "admin") {
return res.status(403).json({ error: "Admin access required" });
}
@@ -133,7 +223,7 @@ export async function requireAuthOrToken(
return next();
}
} catch (error) {
console.error("Session auth error:", error);
logger.error("Session auth error:", error);
}
}
@@ -143,21 +233,25 @@ export async function requireAuthOrToken(
try {
const apiKeyRecord = await prisma.apiKey.findUnique({
where: { key: apiKey },
include: { user: { select: { id: true, username: true, role: true } } },
include: {
user: { select: { id: true, username: true, role: true } },
},
});
if (apiKeyRecord && apiKeyRecord.user) {
// Update last used timestamp (async, don't block)
prisma.apiKey.update({
where: { id: apiKeyRecord.id },
data: { lastUsed: new Date() },
}).catch(() => {}); // Ignore errors on lastUsed update
prisma.apiKey
.update({
where: { id: apiKeyRecord.id },
data: { lastUsed: new Date() },
})
.catch(() => {}); // Ignore errors on lastUsed update
req.user = apiKeyRecord.user;
return next();
}
} catch (error) {
console.error("API key auth error:", error);
logger.error("API key auth error:", error);
}
}
@@ -165,15 +259,20 @@ export async function requireAuthOrToken(
const tokenParam = req.query.token as string;
if (tokenParam) {
try {
const decoded = jwt.verify(tokenParam, JWT_SECRET) as JWTPayload;
const decoded = jwt.verify(tokenParam, JWT_SECRET_VALIDATED) as unknown as JWTPayload;
const user = await prisma.user.findUnique({
where: { id: decoded.userId },
select: { id: true, username: true, role: true },
select: { id: true, username: true, role: true, tokenVersion: true },
});
if (user) {
req.user = user;
return next();
// Validate tokenVersion - reject if password was changed
if (decoded.tokenVersion === undefined || decoded.tokenVersion !== user.tokenVersion) {
// Token was issued before password change, reject
} else {
req.user = { id: user.id, username: user.username, role: user.role };
return next();
}
}
} catch (error) {
// Token invalid, try other methods
@@ -182,19 +281,26 @@ export async function requireAuthOrToken(
// Fallback: check JWT token in Authorization header
const authHeader = req.headers.authorization;
const token = authHeader?.startsWith("Bearer ") ? authHeader.substring(7) : null;
const token = authHeader?.startsWith("Bearer ")
? authHeader.substring(7)
: null;
if (token) {
try {
const decoded = jwt.verify(token, JWT_SECRET) as JWTPayload;
const decoded = jwt.verify(token, JWT_SECRET_VALIDATED) as unknown as JWTPayload;
const user = await prisma.user.findUnique({
where: { id: decoded.userId },
select: { id: true, username: true, role: true },
select: { id: true, username: true, role: true, tokenVersion: true },
});
if (user) {
req.user = user;
return next();
// Validate tokenVersion - reject if password was changed
if (decoded.tokenVersion === undefined || decoded.tokenVersion !== user.tokenVersion) {
// Token was issued before password change, reject
} else {
req.user = { id: user.id, username: user.username, role: user.role };
return next();
}
}
} catch (error) {
// Token invalid, continue to error

View File

@@ -1,4 +1,7 @@
import { Request, Response, NextFunction } from "express";
import { logger } from "../utils/logger";
import { AppError, ErrorCategory } from "../utils/errors";
import { config } from "../config";
export function errorHandler(
err: Error,
@@ -6,6 +9,43 @@ export function errorHandler(
res: Response,
next: NextFunction
) {
console.error(err.stack);
res.status(500).json({ error: "Internal server error" });
// Handle AppError with proper categorization
if (err instanceof AppError) {
// Map error category to HTTP status code
let statusCode = 500;
switch (err.category) {
case ErrorCategory.RECOVERABLE:
statusCode = 400; // Bad Request - client can retry with changes
break;
case ErrorCategory.TRANSIENT:
statusCode = 503; // Service Unavailable - client can retry later
break;
case ErrorCategory.FATAL:
statusCode = 500; // Internal Server Error - cannot recover
break;
}
logger.error(`[AppError] ${err.code}: ${err.message}`, err.details);
return res.status(statusCode).json({
error: err.message,
code: err.code,
category: err.category,
...(config.nodeEnv === "development" && { details: err.details }),
});
}
// Log stack trace for unhandled errors
logger.error("Unhandled error:", err.stack);
// In production, hide stack traces and internal details
if (config.nodeEnv === "production") {
return res.status(500).json({ error: "Internal server error" });
}
// In development, provide more details
res.status(500).json({
error: err.message || "Internal server error",
stack: err.stack,
});
}

View File

@@ -1,7 +1,10 @@
import { Router } from "express";
import { logger } from "../utils/logger";
import { prisma } from "../utils/db";
import { redisClient } from "../utils/redis";
import { requireAuth, requireAdmin } from "../middleware/auth";
import { getSystemSettings } from "../utils/systemSettings";
import os from "os";
const router = Router();
@@ -42,7 +45,7 @@ router.get("/status", requireAuth, async (req, res) => {
isComplete: pending === 0 && processing === 0 && queueLength === 0,
});
} catch (error: any) {
console.error("Analysis status error:", error);
logger.error("Analysis status error:", error);
res.status(500).json({ error: "Failed to get analysis status" });
}
});
@@ -87,14 +90,14 @@ router.post("/start", requireAuth, requireAdmin, async (req, res) => {
}
await pipeline.exec();
console.log(`Queued ${tracks.length} tracks for audio analysis`);
logger.debug(`Queued ${tracks.length} tracks for audio analysis`);
res.json({
message: `Queued ${tracks.length} tracks for analysis`,
queued: tracks.length,
});
} catch (error: any) {
console.error("Analysis start error:", error);
logger.error("Analysis start error:", error);
res.status(500).json({ error: "Failed to start analysis" });
}
});
@@ -121,7 +124,7 @@ router.post("/retry-failed", requireAuth, requireAdmin, async (req, res) => {
reset: result.count,
});
} catch (error: any) {
console.error("Retry failed error:", error);
logger.error("Retry failed error:", error);
res.status(500).json({ error: "Failed to retry analysis" });
}
});
@@ -166,7 +169,7 @@ router.post("/analyze/:trackId", requireAuth, async (req, res) => {
trackId,
});
} catch (error: any) {
console.error("Analyze track error:", error);
logger.error("Analyze track error:", error);
res.status(500).json({ error: "Failed to queue track for analysis" });
}
});
@@ -214,7 +217,7 @@ router.get("/track/:trackId", requireAuth, async (req, res) => {
res.json(track);
} catch (error: any) {
console.error("Get track analysis error:", error);
logger.error("Get track analysis error:", error);
res.status(500).json({ error: "Failed to get track analysis" });
}
});
@@ -280,14 +283,77 @@ router.get("/features", requireAuth, async (req, res) => {
},
});
} catch (error: any) {
console.error("Get features error:", error);
logger.error("Get features error:", error);
res.status(500).json({ error: "Failed to get feature statistics" });
}
});
/**
* GET /api/analysis/workers
* Get current audio analyzer worker configuration
*/
router.get("/workers", requireAuth, requireAdmin, async (req, res) => {
try {
const settings = await getSystemSettings();
const cpuCores = os.cpus().length;
const currentWorkers = settings?.audioAnalyzerWorkers || 2;
// Recommended: 50% of CPU cores, min 2, max 8
const recommended = Math.max(2, Math.min(8, Math.floor(cpuCores / 2)));
res.json({
workers: currentWorkers,
cpuCores,
recommended,
description: `Using ${currentWorkers} of ${cpuCores} available CPU cores`,
});
} catch (error: any) {
logger.error("Get workers config error:", error);
res.status(500).json({ error: "Failed to get worker configuration" });
}
});
/**
 * PUT /api/analysis/workers
 *
 * Update the audio analyzer worker count. Admin only.
 * Body: { workers: number } — an integer in [1, 8].
 *
 * Persists the value in SystemSettings, then publishes a control message
 * on the "audio:analysis:control" Redis channel so the Python analysis
 * worker resizes its pool without a restart.
 */
router.put("/workers", requireAuth, requireAdmin, async (req, res) => {
  try {
    const { workers } = req.body;
    // Number.isInteger rejects non-numbers, NaN and fractional values
    // (e.g. 2.5) which the previous typeof check let through — the Python
    // worker pool needs a whole number of processes.
    if (!Number.isInteger(workers) || workers < 1 || workers > 8) {
      return res.status(400).json({
        error: "Workers must be an integer between 1 and 8"
      });
    }
    // Persist the new count in SystemSettings (singleton row "default").
    await prisma.systemSettings.update({
      where: { id: "default" },
      data: { audioAnalyzerWorkers: workers },
    });
    // Publish control signal to Redis for the Python worker to pick up.
    await redisClient.publish(
      "audio:analysis:control",
      JSON.stringify({ command: "set_workers", count: workers })
    );
    const cpuCores = os.cpus().length;
    // Recommended: 50% of CPU cores, min 2, max 8 (mirrors GET /workers).
    const recommended = Math.max(2, Math.min(8, Math.floor(cpuCores / 2)));
    logger.info(`Audio analyzer workers updated to ${workers}`);
    res.json({
      workers,
      cpuCores,
      recommended,
      description: `Using ${workers} of ${cpuCores} available CPU cores`,
    });
  } catch (error: any) {
    logger.error("Update workers config error:", error);
    res.status(500).json({ error: "Failed to update worker configuration" });
  }
});
export default router;

View File

@@ -1,4 +1,5 @@
import { Router } from "express";
import { logger } from "../utils/logger";
import { requireAuth } from "../middleware/auth";
import { prisma } from "../utils/db";
import crypto from "crypto";
@@ -88,7 +89,7 @@ router.post("/", async (req, res) => {
},
});
console.log(`API key created for user ${userId}: ${deviceName}`);
logger.debug(`API key created for user ${userId}: ${deviceName}`);
res.status(201).json({
apiKey: apiKey.key,
@@ -98,7 +99,7 @@ router.post("/", async (req, res) => {
"API key created successfully. Save this key - you won't see it again!",
});
} catch (error) {
console.error("Create API key error:", error);
logger.error("Create API key error:", error);
res.status(500).json({ error: "Failed to create API key" });
}
});
@@ -152,7 +153,7 @@ router.get("/", async (req, res) => {
res.json({ apiKeys: keys });
} catch (error) {
console.error("List API keys error:", error);
logger.error("List API keys error:", error);
res.status(500).json({ error: "Failed to list API keys" });
}
});
@@ -219,11 +220,11 @@ router.delete("/:id", async (req, res) => {
.json({ error: "API key not found or already deleted" });
}
console.log(`API key ${keyId} revoked by user ${userId}`);
logger.debug(`API key ${keyId} revoked by user ${userId}`);
res.json({ message: "API key revoked successfully" });
} catch (error) {
console.error("Delete API key error:", error);
logger.error("Delete API key error:", error);
res.status(500).json({ error: "Failed to revoke API key" });
}
});

View File

@@ -1,4 +1,5 @@
import { Router } from "express";
import { logger } from "../utils/logger";
import { lastFmService } from "../services/lastfm";
import { musicBrainzService } from "../services/musicbrainz";
import { fanartService } from "../services/fanart";
@@ -17,7 +18,7 @@ router.get("/preview/:artistName/:trackTitle", async (req, res) => {
const decodedArtist = decodeURIComponent(artistName);
const decodedTrack = decodeURIComponent(trackTitle);
console.log(
logger.debug(
`Getting preview for "${decodedTrack}" by ${decodedArtist}`
);
@@ -32,7 +33,7 @@ router.get("/preview/:artistName/:trackTitle", async (req, res) => {
res.status(404).json({ error: "Preview not found" });
}
} catch (error: any) {
console.error("Preview fetch error:", error);
logger.error("Preview fetch error:", error);
res.status(500).json({
error: "Failed to fetch preview",
message: error.message,
@@ -50,7 +51,7 @@ router.get("/discover/:nameOrMbid", async (req, res) => {
try {
const cached = await redisClient.get(cacheKey);
if (cached) {
console.log(`[Discovery] Cache hit for artist: ${nameOrMbid}`);
logger.debug(`[Discovery] Cache hit for artist: ${nameOrMbid}`);
return res.json(JSON.parse(cached));
}
} catch (err) {
@@ -108,7 +109,7 @@ router.get("/discover/:nameOrMbid", async (req, res) => {
lowerBio.includes("multiple artists")
) {
// This is a disambiguation page - don't show it
console.log(
logger.debug(
` Filtered out disambiguation biography for ${artistName}`
);
bio = null;
@@ -125,7 +126,7 @@ router.get("/discover/:nameOrMbid", async (req, res) => {
10
);
} catch (error) {
console.log(`Failed to get top tracks for ${artistName}`);
logger.debug(`Failed to get top tracks for ${artistName}`);
}
}
@@ -136,9 +137,9 @@ router.get("/discover/:nameOrMbid", async (req, res) => {
if (mbid) {
try {
image = await fanartService.getArtistImage(mbid);
console.log(`Fanart.tv image for ${artistName}`);
logger.debug(`Fanart.tv image for ${artistName}`);
} catch (error) {
console.log(
logger.debug(
`✗ Failed to get Fanart.tv image for ${artistName}`
);
}
@@ -149,10 +150,10 @@ router.get("/discover/:nameOrMbid", async (req, res) => {
try {
image = await deezerService.getArtistImage(artistName);
if (image) {
console.log(`Deezer image for ${artistName}`);
logger.debug(`Deezer image for ${artistName}`);
}
} catch (error) {
console.log(` Failed to get Deezer image for ${artistName}`);
logger.debug(` Failed to get Deezer image for ${artistName}`);
}
}
@@ -165,9 +166,9 @@ router.get("/discover/:nameOrMbid", async (req, res) => {
!lastFmImage.includes("2a96cbd8b46e442fc41c2b86b821562f")
) {
image = lastFmImage;
console.log(`Last.fm image for ${artistName}`);
logger.debug(`Last.fm image for ${artistName}`);
} else {
console.log(` Last.fm returned placeholder for ${artistName}`);
logger.debug(` Last.fm returned placeholder for ${artistName}`);
}
}
@@ -265,7 +266,7 @@ router.get("/discover/:nameOrMbid", async (req, res) => {
return 0;
});
} catch (error) {
console.error(
logger.error(
`Failed to get discography for ${artistName}:`,
error
);
@@ -355,14 +356,14 @@ router.get("/discover/:nameOrMbid", async (req, res) => {
DISCOVERY_CACHE_TTL,
JSON.stringify(response)
);
console.log(`[Discovery] Cached artist: ${artistName}`);
logger.debug(`[Discovery] Cached artist: ${artistName}`);
} catch (err) {
// Redis errors are non-critical
}
res.json(response);
} catch (error: any) {
console.error("Artist discovery error:", error);
logger.error("Artist discovery error:", error);
res.status(500).json({
error: "Failed to fetch artist details",
message: error.message,
@@ -380,7 +381,7 @@ router.get("/album/:mbid", async (req, res) => {
try {
const cached = await redisClient.get(cacheKey);
if (cached) {
console.log(`[Discovery] Cache hit for album: ${mbid}`);
logger.debug(`[Discovery] Cache hit for album: ${mbid}`);
return res.json(JSON.parse(cached));
}
} catch (err) {
@@ -397,7 +398,7 @@ router.get("/album/:mbid", async (req, res) => {
} catch (error: any) {
// If 404, try as a release instead
if (error.response?.status === 404) {
console.log(
logger.debug(
`${mbid} is not a release-group, trying as release...`
);
release = await musicBrainzService.getRelease(mbid);
@@ -410,7 +411,7 @@ router.get("/album/:mbid", async (req, res) => {
releaseGroupId
);
} catch (err) {
console.error(
logger.error(
`Failed to get release-group ${releaseGroupId}`
);
}
@@ -439,7 +440,7 @@ router.get("/album/:mbid", async (req, res) => {
albumTitle
);
} catch (error) {
console.log(`Failed to get Last.fm info for ${albumTitle}`);
logger.debug(`Failed to get Last.fm info for ${albumTitle}`);
}
// Get tracks - if we have release, use it directly; otherwise get first release from group
@@ -454,7 +455,7 @@ router.get("/album/:mbid", async (req, res) => {
);
tracks = releaseDetails.media?.[0]?.tracks || [];
} catch (error) {
console.error(
logger.error(
`Failed to get tracks for release ${firstRelease.id}`
);
}
@@ -472,14 +473,14 @@ router.get("/album/:mbid", async (req, res) => {
const response = await fetch(coverArtUrl, { method: "HEAD" });
if (response.ok) {
coverUrl = coverArtUrl;
console.log(`Cover Art Archive has cover for ${albumTitle}`);
logger.debug(`Cover Art Archive has cover for ${albumTitle}`);
} else {
console.log(
logger.debug(
`✗ Cover Art Archive 404 for ${albumTitle}, trying Deezer...`
);
}
} catch (error) {
console.log(
logger.debug(
`✗ Cover Art Archive check failed for ${albumTitle}, trying Deezer...`
);
}
@@ -493,13 +494,13 @@ router.get("/album/:mbid", async (req, res) => {
);
if (deezerCover) {
coverUrl = deezerCover;
console.log(`Deezer has cover for ${albumTitle}`);
logger.debug(`Deezer has cover for ${albumTitle}`);
} else {
// Final fallback to Cover Art Archive URL (might 404, but better than nothing)
coverUrl = coverArtUrl;
}
} catch (error) {
console.log(` Deezer lookup failed for ${albumTitle}`);
logger.debug(` Deezer lookup failed for ${albumTitle}`);
// Final fallback to Cover Art Archive URL
coverUrl = coverArtUrl;
}
@@ -548,14 +549,14 @@ router.get("/album/:mbid", async (req, res) => {
DISCOVERY_CACHE_TTL,
JSON.stringify(response)
);
console.log(`[Discovery] Cached album: ${albumTitle}`);
logger.debug(`[Discovery] Cached album: ${albumTitle}`);
} catch (err) {
// Redis errors are non-critical
}
res.json(response);
} catch (error: any) {
console.error("Album discovery error:", error);
logger.error("Album discovery error:", error);
res.status(500).json({
error: "Failed to fetch album details",
message: error.message,

View File

@@ -1,4 +1,5 @@
import { Router } from "express";
import { logger } from "../utils/logger";
import { audiobookshelfService } from "../services/audiobookshelf";
import { audiobookCacheService } from "../services/audiobookCache";
import { prisma } from "../utils/db";
@@ -57,7 +58,7 @@ router.get(
res.json(transformed);
} catch (error: any) {
console.error("Error fetching continue listening:", error);
logger.error("Error fetching continue listening:", error);
res.status(500).json({
error: "Failed to fetch continue listening",
message: error.message,
@@ -83,14 +84,14 @@ router.post("/sync", requireAuthOrToken, apiLimiter, async (req, res) => {
.json({ error: "Audiobookshelf not enabled" });
}
console.log("[Audiobooks] Starting manual audiobook sync...");
logger.debug("[Audiobooks] Starting manual audiobook sync...");
const result = await audiobookCacheService.syncAll();
// Check how many have series after sync
const seriesCount = await prisma.audiobook.count({
where: { series: { not: null } },
});
console.log(
logger.debug(
`[Audiobooks] Sync complete. Books with series: ${seriesCount}`
);
@@ -108,7 +109,7 @@ router.post("/sync", requireAuthOrToken, apiLimiter, async (req, res) => {
result,
});
} catch (error: any) {
console.error("Audiobook sync failed:", error);
logger.error("Audiobook sync failed:", error);
res.status(500).json({
error: "Sync failed",
message: error.message,
@@ -122,7 +123,7 @@ router.post("/sync", requireAuthOrToken, apiLimiter, async (req, res) => {
*/
// Debug endpoint for series data
router.get("/debug-series", requireAuthOrToken, async (req, res) => {
console.log("[Audiobooks] Debug series endpoint called");
logger.debug("[Audiobooks] Debug series endpoint called");
try {
const { getSystemSettings } = await import("../utils/systemSettings");
const settings = await getSystemSettings();
@@ -135,7 +136,7 @@ router.get("/debug-series", requireAuthOrToken, async (req, res) => {
// Get raw data from Audiobookshelf
const rawBooks = await audiobookshelfService.getAllAudiobooks();
console.log(
logger.debug(
`[Audiobooks] Got ${rawBooks.length} books from Audiobookshelf`
);
@@ -145,7 +146,7 @@ router.get("/debug-series", requireAuthOrToken, async (req, res) => {
return metadata.series || metadata.seriesName;
});
console.log(
logger.debug(
`[Audiobooks] Books with series data: ${booksWithSeries.length}`
);
@@ -179,7 +180,7 @@ router.get("/debug-series", requireAuthOrToken, async (req, res) => {
fullSampleWithSeries: fullSample,
});
} catch (error: any) {
console.error("[Audiobooks] Debug series error:", error);
logger.error("[Audiobooks] Debug series error:", error);
res.status(500).json({ error: error.message });
}
});
@@ -207,7 +208,7 @@ router.get("/search", requireAuthOrToken, apiLimiter, async (req, res) => {
const results = await audiobookshelfService.searchAudiobooks(q);
res.json(results);
} catch (error: any) {
console.error("Error searching audiobooks:", error);
logger.error("Error searching audiobooks:", error);
res.status(500).json({
error: "Failed to search audiobooks",
message: error.message,
@@ -220,7 +221,7 @@ router.get("/search", requireAuthOrToken, apiLimiter, async (req, res) => {
* Get all audiobooks from cached database (instant, no API calls)
*/
router.get("/", requireAuthOrToken, apiLimiter, async (req, res) => {
console.log("[Audiobooks] GET / - fetching audiobooks list");
logger.debug("[Audiobooks] GET / - fetching audiobooks list");
try {
// Check if Audiobookshelf is enabled first
const { getSystemSettings } = await import("../utils/systemSettings");
@@ -296,7 +297,7 @@ router.get("/", requireAuthOrToken, apiLimiter, async (req, res) => {
res.json(audiobooksWithProgress);
} catch (error: any) {
console.error("Error fetching audiobooks:", error);
logger.error("Error fetching audiobooks:", error);
res.status(500).json({
error: "Failed to fetch audiobooks",
message: error.message,
@@ -394,7 +395,7 @@ router.get(
res.json(seriesBooks);
} catch (error: any) {
console.error("Error fetching series:", error);
logger.error("Error fetching series:", error);
res.status(500).json({
error: "Failed to fetch series",
message: error.message,
@@ -419,7 +420,7 @@ router.options("/:id/cover", (req, res) => {
/**
* GET /audiobooks/:id/cover
* Serve cached cover image from local disk (instant, no proxying)
* Serve cached cover image from local disk, or proxy from Audiobookshelf if not cached
* NO RATE LIMITING - These are static files served from disk with aggressive caching
*/
router.get("/:id/cover", async (req, res) => {
@@ -431,7 +432,7 @@ router.get("/:id/cover", async (req, res) => {
const audiobook = await prisma.audiobook.findUnique({
where: { id },
select: { localCoverPath: true },
select: { localCoverPath: true, coverUrl: true },
});
let coverPath = audiobook?.localCoverPath;
@@ -456,25 +457,54 @@ router.get("/:id/cover", async (req, res) => {
}
}
if (!coverPath) {
return res.status(404).json({ error: "Cover not found" });
// If local cover exists, serve it
if (coverPath && fs.existsSync(coverPath)) {
const origin = req.headers.origin || "http://localhost:3030";
res.setHeader("Cache-Control", "public, max-age=31536000, immutable");
res.setHeader("Access-Control-Allow-Origin", origin);
res.setHeader("Access-Control-Allow-Credentials", "true");
res.setHeader("Cross-Origin-Resource-Policy", "cross-origin");
return res.sendFile(coverPath);
}
// Verify file exists before sending
if (!fs.existsSync(coverPath)) {
return res.status(404).json({ error: "Cover file missing" });
// Fallback: proxy from Audiobookshelf if coverUrl is available
if (audiobook?.coverUrl) {
const { getSystemSettings } = await import("../utils/systemSettings");
const settings = await getSystemSettings();
if (settings?.audiobookshelfUrl && settings?.audiobookshelfApiKey) {
const baseUrl = settings.audiobookshelfUrl.replace(/\/$/, "");
const coverApiUrl = `${baseUrl}/api/${audiobook.coverUrl}`;
try {
const response = await fetch(coverApiUrl, {
headers: {
Authorization: `Bearer ${settings.audiobookshelfApiKey}`,
},
});
if (response.ok) {
const origin = req.headers.origin || "http://localhost:3030";
res.setHeader("Content-Type", response.headers.get("content-type") || "image/jpeg");
res.setHeader("Cache-Control", "public, max-age=86400"); // 24 hours for proxied
res.setHeader("Access-Control-Allow-Origin", origin);
res.setHeader("Access-Control-Allow-Credentials", "true");
res.setHeader("Cross-Origin-Resource-Policy", "cross-origin");
// Stream the response body to client
const buffer = await response.arrayBuffer();
return res.send(Buffer.from(buffer));
}
} catch (proxyError: any) {
logger.error(`[Audiobook Cover] Proxy error for ${id}:`, proxyError.message);
}
}
}
// Serve image from local disk with aggressive caching and CORS headers
// Use specific origin instead of * to support credentials mode
const origin = req.headers.origin || "http://localhost:3030";
res.setHeader("Cache-Control", "public, max-age=31536000, immutable");
res.setHeader("Access-Control-Allow-Origin", origin);
res.setHeader("Access-Control-Allow-Credentials", "true");
res.setHeader("Cross-Origin-Resource-Policy", "cross-origin");
res.sendFile(coverPath);
// No cover available
return res.status(404).json({ error: "Cover not found" });
} catch (error: any) {
console.error("Error serving cover:", error);
logger.error("Error serving cover:", error);
res.status(500).json({
error: "Failed to serve cover",
message: error.message,
@@ -509,18 +539,22 @@ router.get("/:id", requireAuthOrToken, apiLimiter, async (req, res) => {
audiobook.lastSyncedAt <
new Date(Date.now() - 7 * 24 * 60 * 60 * 1000)
) {
console.log(
logger.debug(
`[AUDIOBOOK] Audiobook ${id} not cached or stale, fetching...`
);
audiobook = await audiobookCacheService.getAudiobook(id);
}
if (!audiobook) {
return res.status(404).json({ error: "Audiobook not found" });
}
// Get chapters and audio files from API (these change less frequently)
let absBook;
try {
absBook = await audiobookshelfService.getAudiobook(id);
} catch (apiError: any) {
console.warn(
logger.warn(
` Failed to fetch live data from Audiobookshelf for ${id}, using cached data only:`,
apiError.message
);
@@ -567,7 +601,7 @@ router.get("/:id", requireAuthOrToken, apiLimiter, async (req, res) => {
res.json(response);
} catch (error: any) {
console.error("Error fetching audiobook__", error);
logger.error("Error fetching audiobook__", error);
res.status(500).json({
error: "Failed to fetch audiobook",
message: error.message,
@@ -581,17 +615,17 @@ router.get("/:id", requireAuthOrToken, apiLimiter, async (req, res) => {
*/
router.get("/:id/stream", requireAuthOrToken, async (req, res) => {
try {
console.log(
logger.debug(
`[Audiobook Stream] Request for audiobook: ${req.params.id}`
);
console.log(`[Audiobook Stream] User: ${req.user?.id || "unknown"}`);
logger.debug(`[Audiobook Stream] User: ${req.user?.id || "unknown"}`);
// Check if Audiobookshelf is enabled
const { getSystemSettings } = await import("../utils/systemSettings");
const settings = await getSystemSettings();
if (!settings?.audiobookshelfEnabled) {
console.log("[Audiobook Stream] Audiobookshelf not enabled");
logger.debug("[Audiobook Stream] Audiobookshelf not enabled");
return res
.status(503)
.json({ error: "Audiobookshelf is not configured" });
@@ -600,7 +634,7 @@ router.get("/:id/stream", requireAuthOrToken, async (req, res) => {
const { id } = req.params;
const rangeHeader = req.headers.range as string | undefined;
console.log(
logger.debug(
`[Audiobook Stream] Fetching stream for ${id}, range: ${
rangeHeader || "none"
}`
@@ -609,7 +643,7 @@ router.get("/:id/stream", requireAuthOrToken, async (req, res) => {
const { stream, headers, status } =
await audiobookshelfService.streamAudiobook(id, rangeHeader);
console.log(
logger.debug(
`[Audiobook Stream] Got stream, status: ${status}, content-type: ${headers["content-type"]}`
);
@@ -645,7 +679,7 @@ router.get("/:id/stream", requireAuthOrToken, async (req, res) => {
stream.pipe(res);
stream.on("error", (error: any) => {
console.error("[Audiobook Stream] Stream error:", error);
logger.error("[Audiobook Stream] Stream error:", error);
if (!res.headersSent) {
res.status(500).json({
error: "Failed to stream audiobook",
@@ -656,7 +690,7 @@ router.get("/:id/stream", requireAuthOrToken, async (req, res) => {
}
});
} catch (error: any) {
console.error("[Audiobook Stream] Error:", error.message);
logger.error("[Audiobook Stream] Error:", error.message);
res.status(500).json({
error: "Failed to stream audiobook",
message: error.message,
@@ -704,30 +738,30 @@ router.post(
? Math.max(rawDuration, 0)
: 0;
console.log(`\n [AUDIOBOOK PROGRESS] Received update:`);
console.log(` User: ${req.user!.username}`);
console.log(` Audiobook ID: ${id}`);
console.log(
logger.debug(`\n [AUDIOBOOK PROGRESS] Received update:`);
logger.debug(` User: ${req.user!.username}`);
logger.debug(` Audiobook ID: ${id}`);
logger.debug(
` Current Time: ${currentTime}s (${Math.floor(
currentTime / 60
)} mins)`
);
console.log(
logger.debug(
` Duration: ${durationValue}s (${Math.floor(
durationValue / 60
)} mins)`
);
if (durationValue > 0) {
console.log(
logger.debug(
` Progress: ${(
(currentTime / durationValue) *
100
).toFixed(1)}%`
);
} else {
console.log(" Progress: duration unknown");
logger.debug(" Progress: duration unknown");
}
console.log(` Finished: ${!!isFinished}`);
logger.debug(` Finished: ${!!isFinished}`);
// Pull cached metadata to avoid hitting Audiobookshelf for every update
const [cachedAudiobook, existingProgress] = await Promise.all([
@@ -799,7 +833,7 @@ router.post(
},
});
console.log(` Progress saved to database`);
logger.debug(` Progress saved to database`);
// Also update progress in Audiobookshelf
try {
@@ -809,9 +843,9 @@ router.post(
fallbackDuration,
isFinished
);
console.log(` Progress synced to Audiobookshelf`);
logger.debug(` Progress synced to Audiobookshelf`);
} catch (error) {
console.error(
logger.error(
"Failed to sync progress to Audiobookshelf:",
error
);
@@ -830,7 +864,7 @@ router.post(
},
});
} catch (error: any) {
console.error("Error updating progress:", error);
logger.error("Error updating progress:", error);
res.status(500).json({
error: "Failed to update progress",
message: error.message,
@@ -864,9 +898,9 @@ router.delete(
const { id } = req.params;
console.log(`\n[AUDIOBOOK PROGRESS] Removing progress:`);
console.log(` User: ${req.user!.username}`);
console.log(` Audiobook ID: ${id}`);
logger.debug(`\n[AUDIOBOOK PROGRESS] Removing progress:`);
logger.debug(` User: ${req.user!.username}`);
logger.debug(` Audiobook ID: ${id}`);
// Delete progress from our database
await prisma.audiobookProgress.deleteMany({
@@ -876,14 +910,14 @@ router.delete(
},
});
console.log(` Progress removed from database`);
logger.debug(` Progress removed from database`);
// Also remove progress from Audiobookshelf
try {
await audiobookshelfService.updateProgress(id, 0, 0, false);
console.log(` Progress reset in Audiobookshelf`);
logger.debug(` Progress reset in Audiobookshelf`);
} catch (error) {
console.error(
logger.error(
"Failed to reset progress in Audiobookshelf:",
error
);
@@ -895,7 +929,7 @@ router.delete(
message: "Progress removed",
});
} catch (error: any) {
console.error("Error removing progress:", error);
logger.error("Error removing progress:", error);
res.status(500).json({
error: "Failed to remove progress",
message: error.message,

View File

@@ -1,11 +1,13 @@
import { Router } from "express";
import { logger } from "../utils/logger";
import bcrypt from "bcrypt";
import { prisma } from "../utils/db";
import { z } from "zod";
import speakeasy from "speakeasy";
import QRCode from "qrcode";
import crypto from "crypto";
import { requireAuth, requireAdmin, generateToken } from "../middleware/auth";
import jwt from "jsonwebtoken";
import { requireAuth, requireAdmin, generateToken, generateRefreshToken } from "../middleware/auth";
import { encrypt, decrypt } from "../utils/encryption";
const router = Router();
@@ -119,15 +121,21 @@ router.post("/login", async (req, res) => {
}
}
// Generate JWT token
// Generate JWT tokens
const jwtToken = generateToken({
id: user.id,
username: user.username,
role: user.role,
tokenVersion: user.tokenVersion,
});
const refreshToken = generateRefreshToken({
id: user.id,
tokenVersion: user.tokenVersion,
});
res.json({
token: jwtToken,
refreshToken: refreshToken,
user: {
id: user.id,
username: user.username,
@@ -138,7 +146,7 @@ router.post("/login", async (req, res) => {
if (err instanceof z.ZodError) {
return res.status(400).json({ error: "Invalid request", details: err.errors });
}
console.error("Login error:", err);
logger.error("Login error:", err);
res.status(500).json({ error: "Internal error" });
}
});
@@ -150,6 +158,47 @@ router.post("/logout", (req, res) => {
res.json({ message: "Logged out" });
});
// POST /auth/refresh - Refresh access token using refresh token
router.post("/refresh", async (req, res) => {
const { refreshToken } = req.body;
if (!refreshToken) {
return res.status(400).json({ error: "Refresh token required" });
}
try {
const decoded = jwt.verify(refreshToken, process.env.JWT_SECRET || process.env.SESSION_SECRET!) as any;
if (decoded.type !== "refresh") {
return res.status(401).json({ error: "Invalid refresh token" });
}
const user = await prisma.user.findUnique({
where: { id: decoded.userId },
select: { id: true, username: true, role: true, tokenVersion: true }
});
if (!user) {
return res.status(401).json({ error: "User not found" });
}
// Validate tokenVersion
if (decoded.tokenVersion !== user.tokenVersion) {
return res.status(401).json({ error: "Token invalidated" });
}
const newAccessToken = generateToken(user);
const newRefreshToken = generateRefreshToken(user);
return res.json({
token: newAccessToken,
refreshToken: newRefreshToken
});
} catch (error) {
return res.status(401).json({ error: "Invalid refresh token" });
}
});
/**
* @openapi
* /auth/me:
@@ -226,16 +275,19 @@ router.post("/change-password", requireAuth, async (req, res) => {
.json({ error: "Current password is incorrect" });
}
// Update password
// Update password and increment tokenVersion to invalidate all existing tokens
const newPasswordHash = await bcrypt.hash(newPassword, 10);
await prisma.user.update({
where: { id: req.user!.id },
data: { passwordHash: newPasswordHash },
data: {
passwordHash: newPasswordHash,
tokenVersion: { increment: 1 }
},
});
res.json({ message: "Password changed successfully" });
} catch (error) {
console.error("Change password error:", error);
logger.error("Change password error:", error);
res.status(500).json({ error: "Failed to change password" });
}
});
@@ -256,7 +308,7 @@ router.get("/users", requireAuth, requireAdmin, async (req, res) => {
res.json(users);
} catch (error) {
console.error("Get users error:", error);
logger.error("Get users error:", error);
res.status(500).json({ error: "Failed to get users" });
}
});
@@ -320,7 +372,7 @@ router.post("/create-user", requireAuth, requireAdmin, async (req, res) => {
createdAt: user.createdAt,
});
} catch (error) {
console.error("Create user error:", error);
logger.error("Create user error:", error);
res.status(500).json({ error: "Failed to create user" });
}
});
@@ -344,7 +396,7 @@ router.delete("/users/:id", requireAuth, requireAdmin, async (req, res) => {
res.json({ message: "User deleted successfully" });
} catch (error: any) {
console.error("Delete user error:", error);
logger.error("Delete user error:", error);
if (error.code === "P2025") {
return res.status(404).json({ error: "User not found" });
}
@@ -382,7 +434,7 @@ router.post("/2fa/setup", requireAuth, async (req, res) => {
qrCode: qrCodeDataUrl,
});
} catch (error) {
console.error("2FA setup error:", error);
logger.error("2FA setup error:", error);
res.status(500).json({ error: "Failed to setup 2FA" });
}
});
@@ -448,7 +500,7 @@ router.post("/2fa/enable", requireAuth, async (req, res) => {
recoveryCodes: recoveryCodes,
});
} catch (error) {
console.error("2FA enable error:", error);
logger.error("2FA enable error:", error);
res.status(500).json({ error: "Failed to enable 2FA" });
}
});
@@ -505,7 +557,7 @@ router.post("/2fa/disable", requireAuth, async (req, res) => {
res.json({ message: "2FA disabled successfully" });
} catch (error) {
console.error("2FA disable error:", error);
logger.error("2FA disable error:", error);
res.status(500).json({ error: "Failed to disable 2FA" });
}
});
@@ -524,7 +576,7 @@ router.get("/2fa/status", requireAuth, async (req, res) => {
res.json({ enabled: user.twoFactorEnabled });
} catch (error) {
console.error("2FA status error:", error);
logger.error("2FA status error:", error);
res.status(500).json({ error: "Failed to get 2FA status" });
}
});

View File

@@ -1,4 +1,5 @@
import { Router } from "express";
import { logger } from "../utils/logger";
import { requireAuthOrToken } from "../middleware/auth";
import { spotifyService } from "../services/spotify";
import { deezerService, DeezerPlaylistPreview, DeezerRadioStation } from "../services/deezer";
@@ -68,10 +69,10 @@ function deezerRadioToUnified(radio: DeezerRadioStation): PlaylistPreview {
router.get("/playlists/featured", async (req, res) => {
try {
const limit = Math.min(parseInt(req.query.limit as string) || 50, 200);
console.log(`[Browse] Fetching featured playlists (limit: ${limit})...`);
logger.debug(`[Browse] Fetching featured playlists (limit: ${limit})...`);
const playlists = await deezerService.getFeaturedPlaylists(limit);
console.log(`[Browse] Got ${playlists.length} Deezer playlists`);
logger.debug(`[Browse] Got ${playlists.length} Deezer playlists`);
res.json({
playlists: playlists.map(deezerPlaylistToUnified),
@@ -79,7 +80,7 @@ router.get("/playlists/featured", async (req, res) => {
source: "deezer",
});
} catch (error: any) {
console.error("Browse featured playlists error:", error);
logger.error("Browse featured playlists error:", error);
res.status(500).json({ error: error.message || "Failed to fetch playlists" });
}
});
@@ -96,10 +97,10 @@ router.get("/playlists/search", async (req, res) => {
}
const limit = Math.min(parseInt(req.query.limit as string) || 50, 100);
console.log(`[Browse] Searching playlists for "${query}"...`);
logger.debug(`[Browse] Searching playlists for "${query}"...`);
const playlists = await deezerService.searchPlaylists(query, limit);
console.log(`[Browse] Search "${query}": ${playlists.length} results`);
logger.debug(`[Browse] Search "${query}": ${playlists.length} results`);
res.json({
playlists: playlists.map(deezerPlaylistToUnified),
@@ -108,7 +109,7 @@ router.get("/playlists/search", async (req, res) => {
source: "deezer",
});
} catch (error: any) {
console.error("Browse search playlists error:", error);
logger.error("Browse search playlists error:", error);
res.status(500).json({ error: error.message || "Failed to search playlists" });
}
});
@@ -132,7 +133,7 @@ router.get("/playlists/:id", async (req, res) => {
url: `https://www.deezer.com/playlist/${id}`,
});
} catch (error: any) {
console.error("Playlist fetch error:", error);
logger.error("Playlist fetch error:", error);
res.status(500).json({ error: error.message || "Failed to fetch playlist" });
}
});
@@ -147,7 +148,7 @@ router.get("/playlists/:id", async (req, res) => {
*/
router.get("/radios", async (req, res) => {
try {
console.log("[Browse] Fetching radio stations...");
logger.debug("[Browse] Fetching radio stations...");
const radios = await deezerService.getRadioStations();
res.json({
@@ -156,7 +157,7 @@ router.get("/radios", async (req, res) => {
source: "deezer",
});
} catch (error: any) {
console.error("Browse radios error:", error);
logger.error("Browse radios error:", error);
res.status(500).json({ error: error.message || "Failed to fetch radios" });
}
});
@@ -167,7 +168,7 @@ router.get("/radios", async (req, res) => {
*/
router.get("/radios/by-genre", async (req, res) => {
try {
console.log("[Browse] Fetching radios by genre...");
logger.debug("[Browse] Fetching radios by genre...");
const genresWithRadios = await deezerService.getRadiosByGenre();
// Transform to include unified format
@@ -183,7 +184,7 @@ router.get("/radios/by-genre", async (req, res) => {
source: "deezer",
});
} catch (error: any) {
console.error("Browse radios by genre error:", error);
logger.error("Browse radios by genre error:", error);
res.status(500).json({ error: error.message || "Failed to fetch radios" });
}
});
@@ -195,7 +196,7 @@ router.get("/radios/by-genre", async (req, res) => {
router.get("/radios/:id", async (req, res) => {
try {
const { id } = req.params;
console.log(`[Browse] Fetching radio ${id} tracks...`);
logger.debug(`[Browse] Fetching radio ${id} tracks...`);
const radioPlaylist = await deezerService.getRadioTracks(id);
@@ -209,7 +210,7 @@ router.get("/radios/:id", async (req, res) => {
type: "radio",
});
} catch (error: any) {
console.error("Radio tracks error:", error);
logger.error("Radio tracks error:", error);
res.status(500).json({ error: error.message || "Failed to fetch radio tracks" });
}
});
@@ -224,7 +225,7 @@ router.get("/radios/:id", async (req, res) => {
*/
router.get("/genres", async (req, res) => {
try {
console.log("[Browse] Fetching genres...");
logger.debug("[Browse] Fetching genres...");
const genres = await deezerService.getGenres();
res.json({
@@ -233,7 +234,7 @@ router.get("/genres", async (req, res) => {
source: "deezer",
});
} catch (error: any) {
console.error("Browse genres error:", error);
logger.error("Browse genres error:", error);
res.status(500).json({ error: error.message || "Failed to fetch genres" });
}
});
@@ -249,7 +250,7 @@ router.get("/genres/:id", async (req, res) => {
return res.status(400).json({ error: "Invalid genre ID" });
}
console.log(`[Browse] Fetching content for genre ${genreId}...`);
logger.debug(`[Browse] Fetching content for genre ${genreId}...`);
const content = await deezerService.getEditorialContent(genreId);
res.json({
@@ -259,7 +260,7 @@ router.get("/genres/:id", async (req, res) => {
source: "deezer",
});
} catch (error: any) {
console.error("Genre content error:", error);
logger.error("Genre content error:", error);
res.status(500).json({ error: error.message || "Failed to fetch genre content" });
}
});
@@ -290,7 +291,7 @@ router.get("/genres/:id/playlists", async (req, res) => {
source: "deezer",
});
} catch (error: any) {
console.error("Genre playlists error:", error);
logger.error("Genre playlists error:", error);
res.status(500).json({ error: error.message || "Failed to fetch genre playlists" });
}
});
@@ -337,7 +338,7 @@ router.post("/playlists/parse", async (req, res) => {
error: "Invalid or unsupported URL. Please provide a Spotify or Deezer playlist URL."
});
} catch (error: any) {
console.error("Parse URL error:", error);
logger.error("Parse URL error:", error);
res.status(500).json({ error: error.message || "Failed to parse URL" });
}
});
@@ -353,7 +354,7 @@ router.post("/playlists/parse", async (req, res) => {
*/
router.get("/all", async (req, res) => {
try {
console.log("[Browse] Fetching browse content (playlists + genres)...");
logger.debug("[Browse] Fetching browse content (playlists + genres)...");
// Only fetch playlists and genres - radios are now internal library-based
const [playlists, genres] = await Promise.all([
@@ -369,7 +370,7 @@ router.get("/all", async (req, res) => {
source: "deezer",
});
} catch (error: any) {
console.error("Browse all error:", error);
logger.error("Browse all error:", error);
res.status(500).json({ error: error.message || "Failed to fetch browse content" });
}
});

View File

@@ -1,4 +1,5 @@
import { Router } from "express";
import { logger } from "../utils/logger";
import { requireAuthOrToken } from "../middleware/auth";
import { prisma } from "../utils/db";
import crypto from "crypto";
@@ -64,7 +65,7 @@ router.post("/generate", requireAuthOrToken, async (req, res) => {
expiresIn: 300, // 5 minutes in seconds
});
} catch (error) {
console.error("Generate device link code error:", error);
logger.error("Generate device link code error:", error);
res.status(500).json({ error: "Failed to generate device link code" });
}
});
@@ -123,7 +124,7 @@ router.post("/verify", async (req, res) => {
username: linkCode.user.username,
});
} catch (error) {
console.error("Verify device link code error:", error);
logger.error("Verify device link code error:", error);
res.status(500).json({ error: "Failed to verify device link code" });
}
});
@@ -161,7 +162,7 @@ router.get("/status/:code", async (req, res) => {
expiresAt: linkCode.expiresAt,
});
} catch (error) {
console.error("Check device link status error:", error);
logger.error("Check device link status error:", error);
res.status(500).json({ error: "Failed to check status" });
}
});
@@ -184,7 +185,7 @@ router.get("/devices", requireAuthOrToken, async (req, res) => {
res.json(apiKeys);
} catch (error) {
console.error("Get devices error:", error);
logger.error("Get devices error:", error);
res.status(500).json({ error: "Failed to get devices" });
}
});
@@ -209,7 +210,7 @@ router.delete("/devices/:id", requireAuthOrToken, async (req, res) => {
res.json({ success: true });
} catch (error) {
console.error("Revoke device error:", error);
logger.error("Revoke device error:", error);
res.status(500).json({ error: "Failed to revoke device" });
}
});

File diff suppressed because it is too large Load Diff

View File

@@ -1,9 +1,11 @@
import { Router } from "express";
import { logger } from "../utils/logger";
import { requireAuthOrToken } from "../middleware/auth";
import { prisma } from "../utils/db";
import { config } from "../config";
import { lidarrService } from "../services/lidarr";
import { musicBrainzService } from "../services/musicbrainz";
import { lastFmService } from "../services/lastfm";
import { simpleDownloadManager } from "../services/simpleDownloadManager";
import crypto from "crypto";
@@ -11,6 +13,78 @@ const router = Router();
router.use(requireAuthOrToken);
/**
* Verify and potentially correct artist name before download
* Uses multiple sources for canonical name resolution:
* 1. MusicBrainz (if MBID provided) - most authoritative
* 2. LastFM correction API - handles aliases and misspellings
* 3. Original name - fallback
*
* @returns Object with verified name and whether correction was applied
*/
async function verifyArtistName(
artistName: string,
artistMbid?: string
): Promise<{
verifiedName: string;
wasCorrected: boolean;
source: "musicbrainz" | "lastfm" | "original";
originalName: string;
}> {
const originalName = artistName;
// Strategy 1: If we have MBID, use MusicBrainz as authoritative source
if (artistMbid) {
try {
const mbArtist = await musicBrainzService.getArtist(artistMbid);
if (mbArtist?.name) {
return {
verifiedName: mbArtist.name,
wasCorrected:
mbArtist.name.toLowerCase() !==
artistName.toLowerCase(),
source: "musicbrainz",
originalName,
};
}
} catch (error) {
logger.warn(
`MusicBrainz lookup failed for MBID ${artistMbid}:`,
error
);
}
}
// Strategy 2: Use LastFM correction API
try {
const correction = await lastFmService.getArtistCorrection(artistName);
if (correction?.corrected) {
logger.debug(
`[VERIFY] LastFM correction: "${artistName}" → "${correction.canonicalName}"`
);
return {
verifiedName: correction.canonicalName,
wasCorrected: true,
source: "lastfm",
originalName,
};
}
} catch (error) {
logger.warn(
`LastFM correction lookup failed for "${artistName}":`,
error
);
}
// Strategy 3: Return original name
return {
verifiedName: artistName,
wasCorrected: false,
source: "original",
originalName,
};
}
// POST /downloads - Create download job
router.post("/", async (req, res) => {
try {
@@ -75,6 +149,18 @@ router.post("/", async (req, res) => {
});
}
// Single album download - verify artist name before proceeding
let verifiedArtistName = artistName;
if (type === "album" && artistName) {
const verification = await verifyArtistName(artistName, mbid);
if (verification.wasCorrected) {
logger.debug(
`[DOWNLOAD] Artist name verified: "${artistName}" → "${verification.verifiedName}" (source: ${verification.source})`
);
verifiedArtistName = verification.verifiedName;
}
}
// Single album download - check for existing job first
const existingJob = await prisma.downloadJob.findFirst({
where: {
@@ -84,7 +170,9 @@ router.post("/", async (req, res) => {
});
if (existingJob) {
console.log(`[DOWNLOAD] Job already exists for ${mbid}: ${existingJob.id} (${existingJob.status})`);
logger.debug(
`[DOWNLOAD] Job already exists for ${mbid}: ${existingJob.id} (${existingJob.status})`
);
return res.json({
id: existingJob.id,
status: existingJob.status,
@@ -105,13 +193,13 @@ router.post("/", async (req, res) => {
metadata: {
downloadType,
rootFolderPath,
artistName,
artistName: verifiedArtistName,
albumTitle,
},
},
});
console.log(
logger.debug(
`[DOWNLOAD] Triggering Lidarr: ${type} "${subject}" -> ${rootFolderPath}`
);
@@ -122,10 +210,10 @@ router.post("/", async (req, res) => {
mbid,
subject,
rootFolderPath,
artistName,
verifiedArtistName,
albumTitle
).catch((error) => {
console.error(
logger.error(
`Download processing failed for job ${job.id}:`,
error
);
@@ -139,7 +227,7 @@ router.post("/", async (req, res) => {
message: "Download job created. Processing in background.",
});
} catch (error) {
console.error("Create download job error:", error);
logger.error("Create download job error:", error);
res.status(500).json({ error: "Failed to create download job" });
}
});
@@ -154,27 +242,66 @@ async function processArtistDownload(
rootFolderPath: string,
downloadType: string
): Promise<{ id: string; subject: string }[]> {
console.log(`\n Processing artist download: ${artistName}`);
console.log(` Artist MBID: ${artistMbid}`);
logger.debug(`\n Processing artist download: ${artistName}`);
logger.debug(` Artist MBID: ${artistMbid}`);
// Generate a batch ID to group all album downloads
const batchId = crypto.randomUUID();
console.log(` Batch ID: ${batchId}`);
logger.debug(` Batch ID: ${batchId}`);
// CRITICAL FIX: Resolve canonical artist name from MusicBrainz
// Last.fm may return aliases (e.g., "blink" for "blink-182")
// Lidarr needs the official name to find the correct artist
let canonicalArtistName = artistName;
try {
logger.debug(` Resolving canonical artist name from MusicBrainz...`);
const mbArtist = await musicBrainzService.getArtist(artistMbid);
if (mbArtist && mbArtist.name) {
canonicalArtistName = mbArtist.name;
if (canonicalArtistName !== artistName) {
logger.debug(
` ✓ Canonical name resolved: "${artistName}" → "${canonicalArtistName}"`
);
} else {
logger.debug(
` ✓ Name matches canonical: "${canonicalArtistName}"`
);
}
}
} catch (mbError: any) {
logger.warn(` ⚠ MusicBrainz lookup failed: ${mbError.message}`);
// Fallback to LastFM correction
try {
const correction = await lastFmService.getArtistCorrection(
artistName
);
if (correction?.canonicalName) {
canonicalArtistName = correction.canonicalName;
logger.debug(
` ✓ Name resolved via LastFM: "${artistName}" → "${canonicalArtistName}"`
);
}
} catch (lfmError) {
logger.warn(
` ⚠ LastFM correction also failed, using original name`
);
}
}
try {
// First, add the artist to Lidarr (this monitors all albums)
const lidarrArtist = await lidarrService.addArtist(
artistMbid,
artistName,
canonicalArtistName,
rootFolderPath
);
if (!lidarrArtist) {
console.log(` Failed to add artist to Lidarr`);
logger.debug(` Failed to add artist to Lidarr`);
throw new Error("Failed to add artist to Lidarr");
}
console.log(` Artist added to Lidarr (ID: ${lidarrArtist.id})`);
logger.debug(` Artist added to Lidarr (ID: ${lidarrArtist.id})`);
// Fetch albums from MusicBrainz
const releaseGroups = await musicBrainzService.getReleaseGroups(
@@ -183,12 +310,12 @@ async function processArtistDownload(
100
);
console.log(
logger.debug(
` Found ${releaseGroups.length} albums/EPs from MusicBrainz`
);
if (releaseGroups.length === 0) {
console.log(` No albums found for artist`);
logger.debug(` No albums found for artist`);
return [];
}
@@ -206,49 +333,84 @@ async function processArtistDownload(
});
if (existingAlbum) {
console.log(` Skipping "${albumTitle}" - already in library`);
logger.debug(` Skipping "${albumTitle}" - already in library`);
continue;
}
// Check if there's already a pending/processing job for this album
const existingJob = await prisma.downloadJob.findFirst({
where: {
targetMbid: albumMbid,
status: { in: ["pending", "processing"] },
},
// Use transaction to prevent race conditions when creating jobs
const jobResult = await prisma.$transaction(async (tx) => {
// Check for existing active job
const existingJob = await tx.downloadJob.findFirst({
where: {
targetMbid: albumMbid,
status: { in: ["pending", "processing"] },
},
});
if (existingJob) {
return {
skipped: true,
job: existingJob,
reason: "already_queued",
};
}
// Also check for recently failed job (within last 30 seconds) to prevent spam retries
const recentFailed = await tx.downloadJob.findFirst({
where: {
targetMbid: albumMbid,
status: "failed",
completedAt: { gte: new Date(Date.now() - 30000) },
},
});
if (recentFailed) {
return {
skipped: true,
job: recentFailed,
reason: "recently_failed",
};
}
// Create new job inside transaction
const now = new Date();
const job = await tx.downloadJob.create({
data: {
userId,
subject: albumSubject,
type: "album",
targetMbid: albumMbid,
status: "pending",
metadata: {
downloadType,
rootFolderPath,
artistName,
artistMbid,
albumTitle,
batchId, // Link all albums in this artist download
batchArtist: artistName,
createdAt: now.toISOString(), // Track when job was created for timeout
},
},
});
return { skipped: false, job };
});
if (existingJob) {
console.log(
` Skipping "${albumTitle}" - already in download queue`
if (jobResult.skipped) {
logger.debug(
` Skipping "${albumTitle}" - ${
jobResult.reason === "recently_failed"
? "recently failed"
: "already in download queue"
}`
);
continue;
}
// Create download job for this album
const now = new Date();
const job = await prisma.downloadJob.create({
data: {
userId,
subject: albumSubject,
type: "album",
targetMbid: albumMbid,
status: "pending",
metadata: {
downloadType,
rootFolderPath,
artistName,
artistMbid,
albumTitle,
batchId, // Link all albums in this artist download
batchArtist: artistName,
createdAt: now.toISOString(), // Track when job was created for timeout
},
},
});
const job = jobResult.job;
jobs.push({ id: job.id, subject: albumSubject });
console.log(` [JOB] Created job for: ${albumSubject}`);
logger.debug(` [JOB] Created job for: ${albumSubject}`);
// Start the download in background
processDownload(
@@ -260,14 +422,14 @@ async function processArtistDownload(
artistName,
albumTitle
).catch((error) => {
console.error(`Download failed for ${albumSubject}:`, error);
logger.error(`Download failed for ${albumSubject}:`, error);
});
}
console.log(` Created ${jobs.length} album download jobs`);
logger.debug(` Created ${jobs.length} album download jobs`);
return jobs;
} catch (error: any) {
console.error(` Failed to process artist download:`, error.message);
logger.error(` Failed to process artist download:`, error.message);
throw error;
}
}
@@ -284,7 +446,7 @@ async function processDownload(
) {
const job = await prisma.downloadJob.findUnique({ where: { id: jobId } });
if (!job) {
console.error(`Job ${jobId} not found`);
logger.error(`Job ${jobId} not found`);
return;
}
@@ -304,7 +466,7 @@ async function processDownload(
}
}
console.log(`Parsed: Artist="${parsedArtist}", Album="${parsedAlbum}"`);
logger.debug(`Parsed: Artist="${parsedArtist}", Album="${parsedAlbum}"`);
// Use simple download manager for album downloads
const result = await simpleDownloadManager.startDownload(
@@ -316,7 +478,7 @@ async function processDownload(
);
if (!result.success) {
console.error(`Failed to start download: ${result.error}`);
logger.error(`Failed to start download: ${result.error}`);
}
}
}
@@ -335,12 +497,12 @@ router.delete("/clear-all", async (req, res) => {
const result = await prisma.downloadJob.deleteMany({ where });
console.log(
logger.debug(
` Cleared ${result.count} download jobs for user ${userId}`
);
res.json({ success: true, deleted: result.count });
} catch (error) {
console.error("Clear downloads error:", error);
logger.error("Clear downloads error:", error);
res.status(500).json({ error: "Failed to clear downloads" });
}
});
@@ -355,7 +517,7 @@ router.post("/clear-lidarr-queue", async (req, res) => {
errors: result.errors,
});
} catch (error: any) {
console.error("Clear Lidarr queue error:", error);
logger.error("Clear Lidarr queue error:", error);
res.status(500).json({ error: "Failed to clear Lidarr queue" });
}
});
@@ -373,7 +535,7 @@ router.get("/failed", async (req, res) => {
res.json(failedAlbums);
} catch (error) {
console.error("List failed albums error:", error);
logger.error("List failed albums error:", error);
res.status(500).json({ error: "Failed to list failed albums" });
}
});
@@ -399,7 +561,7 @@ router.delete("/failed/:id", async (req, res) => {
res.json({ success: true });
} catch (error) {
console.error("Delete failed album error:", error);
logger.error("Delete failed album error:", error);
res.status(500).json({ error: "Failed to delete failed album" });
}
});
@@ -423,7 +585,7 @@ router.get("/:id", async (req, res) => {
res.json(job);
} catch (error) {
console.error("Get download job error:", error);
logger.error("Get download job error:", error);
res.status(500).json({ error: "Failed to get download job" });
}
});
@@ -456,7 +618,7 @@ router.patch("/:id", async (req, res) => {
res.json(updated);
} catch (error) {
console.error("Update download job error:", error);
logger.error("Update download job error:", error);
res.status(500).json({ error: "Failed to update download job" });
}
});
@@ -479,8 +641,8 @@ router.delete("/:id", async (req, res) => {
// Return success even if nothing was deleted (idempotent delete)
res.json({ success: true, deleted: result.count > 0 });
} catch (error: any) {
console.error("Delete download job error:", error);
console.error("Error details:", error.message, error.stack);
logger.error("Delete download job error:", error);
logger.error("Error details:", error.message, error.stack);
res.status(500).json({
error: "Failed to delete download job",
details: error.message,
@@ -492,7 +654,12 @@ router.delete("/:id", async (req, res) => {
router.get("/", async (req, res) => {
try {
const userId = req.user!.id;
const { status, limit = "50", includeDiscovery = "false", includeCleared = "false" } = req.query;
const {
status,
limit = "50",
includeDiscovery = "false",
includeCleared = "false",
} = req.query;
const where: any = { userId };
if (status) {
@@ -521,7 +688,7 @@ router.get("/", async (req, res) => {
res.json(filteredJobs);
} catch (error) {
console.error("List download jobs error:", error);
logger.error("List download jobs error:", error);
res.status(500).json({ error: "Failed to list download jobs" });
}
});
@@ -580,7 +747,7 @@ router.post("/keep-track", async (req, res) => {
"Track marked as kept. Please add the full album manually to your /music folder.",
});
} catch (error) {
console.error("Keep track error:", error);
logger.error("Keep track error:", error);
res.status(500).json({ error: "Failed to keep track" });
}
});

View File

@@ -1,7 +1,19 @@
import { Router } from "express";
import { logger } from "../utils/logger";
import { requireAuth, requireAdmin } from "../middleware/auth";
import { enrichmentService } from "../services/enrichment";
import { getEnrichmentProgress, runFullEnrichment } from "../workers/unifiedEnrichment";
import {
getEnrichmentProgress,
runFullEnrichment,
} from "../workers/unifiedEnrichment";
import { enrichmentStateService } from "../services/enrichmentState";
import { enrichmentFailureService } from "../services/enrichmentFailureService";
import {
getSystemSettings,
invalidateSystemSettingsCache,
} from "../utils/systemSettings";
import { rateLimiter } from "../services/rateLimiter";
import { redisClient } from "../utils/redis";
const router = Router();
@@ -16,11 +28,82 @@ router.get("/progress", async (req, res) => {
const progress = await getEnrichmentProgress();
res.json(progress);
} catch (error) {
console.error("Get enrichment progress error:", error);
logger.error("Get enrichment progress error:", error);
res.status(500).json({ error: "Failed to get progress" });
}
});
/**
* GET /enrichment/status
* Get detailed enrichment state (running, paused, etc.)
*/
router.get("/status", async (req, res) => {
try {
const state = await enrichmentStateService.getState();
res.json(state || { status: "idle", currentPhase: null });
} catch (error) {
logger.error("Get enrichment status error:", error);
res.status(500).json({ error: "Failed to get status" });
}
});
/**
* POST /enrichment/pause
* Pause the enrichment process
*/
router.post("/pause", requireAdmin, async (req, res) => {
try {
const state = await enrichmentStateService.pause();
res.json({
message: "Enrichment paused",
state,
});
} catch (error: any) {
logger.error("Pause enrichment error:", error);
res.status(400).json({
error: error.message || "Failed to pause enrichment",
});
}
});
/**
* POST /enrichment/resume
* Resume a paused enrichment process
*/
router.post("/resume", requireAdmin, async (req, res) => {
try {
const state = await enrichmentStateService.resume();
res.json({
message: "Enrichment resumed",
state,
});
} catch (error: any) {
logger.error("Resume enrichment error:", error);
res.status(400).json({
error: error.message || "Failed to resume enrichment",
});
}
});
/**
* POST /enrichment/stop
* Stop the enrichment process
*/
router.post("/stop", requireAdmin, async (req, res) => {
try {
const state = await enrichmentStateService.stop();
res.json({
message: "Enrichment stopping...",
state,
});
} catch (error: any) {
logger.error("Stop enrichment error:", error);
res.status(400).json({
error: error.message || "Failed to stop enrichment",
});
}
});
/**
* POST /enrichment/full
* Trigger full enrichment (re-enriches everything regardless of status)
@@ -29,20 +112,48 @@ router.get("/progress", async (req, res) => {
router.post("/full", requireAdmin, async (req, res) => {
try {
// This runs in the background
runFullEnrichment().catch(err => {
console.error("Full enrichment error:", err);
runFullEnrichment().catch((err) => {
logger.error("Full enrichment error:", err);
});
res.json({
res.json({
message: "Full enrichment started",
description: "All artists, track tags, and audio analysis will be re-processed"
description:
"All artists, track tags, and audio analysis will be re-processed",
});
} catch (error) {
console.error("Trigger full enrichment error:", error);
logger.error("Trigger full enrichment error:", error);
res.status(500).json({ error: "Failed to start full enrichment" });
}
});
/**
* POST /enrichment/sync
* Trigger incremental enrichment (only processes pending items)
* Fast sync that picks up new content without re-processing everything
*/
router.post("/sync", async (req, res) => {
try {
const { triggerEnrichmentNow } = await import(
"../workers/unifiedEnrichment"
);
// Trigger immediate enrichment cycle (incremental mode)
const result = await triggerEnrichmentNow();
res.json({
message: "Incremental sync started",
description: "Processing new and pending items only",
result,
});
} catch (error: any) {
logger.error("Trigger sync error:", error);
res.status(500).json({
error: error.message || "Failed to start sync",
});
}
});
/**
* GET /enrichment/settings
* Get enrichment settings for current user
@@ -53,7 +164,7 @@ router.get("/settings", async (req, res) => {
const settings = await enrichmentService.getSettings(userId);
res.json(settings);
} catch (error) {
console.error("Get enrichment settings error:", error);
logger.error("Get enrichment settings error:", error);
res.status(500).json({ error: "Failed to get settings" });
}
});
@@ -65,10 +176,13 @@ router.get("/settings", async (req, res) => {
router.put("/settings", async (req, res) => {
try {
const userId = req.user!.id;
const settings = await enrichmentService.updateSettings(userId, req.body);
const settings = await enrichmentService.updateSettings(
userId,
req.body
);
res.json(settings);
} catch (error) {
console.error("Update enrichment settings error:", error);
logger.error("Update enrichment settings error:", error);
res.status(500).json({ error: "Failed to update settings" });
}
});
@@ -86,14 +200,20 @@ router.post("/artist/:id", async (req, res) => {
return res.status(400).json({ error: "Enrichment is not enabled" });
}
const enrichmentData = await enrichmentService.enrichArtist(req.params.id, settings);
const enrichmentData = await enrichmentService.enrichArtist(
req.params.id,
settings
);
if (!enrichmentData) {
return res.status(404).json({ error: "No enrichment data found" });
}
if (enrichmentData.confidence > 0.3) {
await enrichmentService.applyArtistEnrichment(req.params.id, enrichmentData);
await enrichmentService.applyArtistEnrichment(
req.params.id,
enrichmentData
);
}
res.json({
@@ -102,8 +222,10 @@ router.post("/artist/:id", async (req, res) => {
data: enrichmentData,
});
} catch (error: any) {
console.error("Enrich artist error:", error);
res.status(500).json({ error: error.message || "Failed to enrich artist" });
logger.error("Enrich artist error:", error);
res.status(500).json({
error: error.message || "Failed to enrich artist",
});
}
});
@@ -120,14 +242,20 @@ router.post("/album/:id", async (req, res) => {
return res.status(400).json({ error: "Enrichment is not enabled" });
}
const enrichmentData = await enrichmentService.enrichAlbum(req.params.id, settings);
const enrichmentData = await enrichmentService.enrichAlbum(
req.params.id,
settings
);
if (!enrichmentData) {
return res.status(404).json({ error: "No enrichment data found" });
}
if (enrichmentData.confidence > 0.3) {
await enrichmentService.applyAlbumEnrichment(req.params.id, enrichmentData);
await enrichmentService.applyAlbumEnrichment(
req.params.id,
enrichmentData
);
}
res.json({
@@ -136,8 +264,10 @@ router.post("/album/:id", async (req, res) => {
data: enrichmentData,
});
} catch (error: any) {
console.error("Enrich album error:", error);
res.status(500).json({ error: error.message || "Failed to enrich album" });
logger.error("Enrich album error:", error);
res.status(500).json({
error: error.message || "Failed to enrich album",
});
}
});
@@ -148,7 +278,9 @@ router.post("/album/:id", async (req, res) => {
router.post("/start", async (req, res) => {
try {
const userId = req.user!.id;
const { notificationService } = await import("../services/notificationService");
const { notificationService } = await import(
"../services/notificationService"
);
// Check if enrichment is enabled in system settings
const { prisma } = await import("../utils/db");
@@ -158,7 +290,9 @@ router.post("/start", async (req, res) => {
});
if (!systemSettings?.autoEnrichMetadata) {
return res.status(400).json({ error: "Enrichment is not enabled. Enable it in settings first." });
return res.status(400).json({
error: "Enrichment is not enabled. Enable it in settings first.",
});
}
// Get user enrichment settings or use defaults
@@ -175,50 +309,282 @@ router.post("/start", async (req, res) => {
);
// Start enrichment in background
enrichmentService.enrichLibrary(userId).then(async () => {
// Send notification when complete
await notificationService.notifySystem(
userId,
"Library Enrichment Complete",
"All artist metadata has been enriched"
);
}).catch(async (error) => {
console.error("Background enrichment failed:", error);
await notificationService.create({
userId,
type: "error",
title: "Enrichment Failed",
message: error.message || "Failed to enrich library metadata",
enrichmentService
.enrichLibrary(userId)
.then(async () => {
// Send notification when complete
await notificationService.notifySystem(
userId,
"Library Enrichment Complete",
"All artist metadata has been enriched"
);
})
.catch(async (error) => {
logger.error("Background enrichment failed:", error);
await notificationService.create({
userId,
type: "error",
title: "Enrichment Failed",
message:
error.message || "Failed to enrich library metadata",
});
});
});
res.json({
success: true,
message: "Library enrichment started in background",
});
} catch (error: any) {
console.error("Start enrichment error:", error);
res.status(500).json({ error: error.message || "Failed to start enrichment" });
logger.error("Start enrichment error:", error);
res.status(500).json({
error: error.message || "Failed to start enrichment",
});
}
});
/**
* PUT /library/artists/:id/metadata
* Update artist metadata manually
* GET /enrichment/failures
* Get all enrichment failures with filtering
*/
router.get("/failures", async (req, res) => {
try {
const { entityType, includeSkipped, includeResolved, limit, offset } =
req.query;
const options: any = {};
if (entityType) options.entityType = entityType as string;
if (includeSkipped === "true") options.includeSkipped = true;
if (includeResolved === "true") options.includeResolved = true;
if (limit) options.limit = parseInt(limit as string);
if (offset) options.offset = parseInt(offset as string);
const result = await enrichmentFailureService.getFailures(options);
res.json(result);
} catch (error) {
logger.error("Get failures error:", error);
res.status(500).json({ error: "Failed to get failures" });
}
});
/**
* GET /enrichment/failures/counts
* Get failure counts by type
*/
router.get("/failures/counts", async (req, res) => {
try {
const counts = await enrichmentFailureService.getFailureCounts();
res.json(counts);
} catch (error) {
logger.error("Get failure counts error:", error);
res.status(500).json({ error: "Failed to get failure counts" });
}
});
/**
* POST /enrichment/retry
* Retry specific failed items
*/
router.post("/retry", requireAdmin, async (req, res) => {
try {
const { ids } = req.body;
if (!ids || !Array.isArray(ids) || ids.length === 0) {
return res
.status(400)
.json({ error: "Must provide array of failure IDs" });
}
// Reset retry count for these failures
await enrichmentFailureService.resetRetryCount(ids);
// Get the failures to determine what to retry
const failures = await Promise.all(
ids.map((id) => enrichmentFailureService.getFailure(id))
);
// Group by type and trigger appropriate re-enrichment
const { prisma } = await import("../utils/db");
let queued = 0;
let skipped = 0;
for (const failure of failures) {
if (!failure) continue;
try {
if (failure.entityType === "artist") {
// Check if artist still exists
const artist = await prisma.artist.findUnique({
where: { id: failure.entityId },
select: { id: true },
});
if (!artist) {
// Entity was deleted - mark failure as resolved
await enrichmentFailureService.resolveFailures([
failure.id,
]);
skipped++;
continue;
}
// Reset artist enrichment status
await prisma.artist.update({
where: { id: failure.entityId },
data: { enrichmentStatus: "pending" },
});
queued++;
} else if (failure.entityType === "track") {
// Check if track still exists
const track = await prisma.track.findUnique({
where: { id: failure.entityId },
select: { id: true },
});
if (!track) {
// Entity was deleted - mark failure as resolved
await enrichmentFailureService.resolveFailures([
failure.id,
]);
skipped++;
continue;
}
// Reset track tag status
await prisma.track.update({
where: { id: failure.entityId },
data: { lastfmTags: [] },
});
queued++;
} else if (failure.entityType === "audio") {
// Check if track still exists
const track = await prisma.track.findUnique({
where: { id: failure.entityId },
select: { id: true },
});
if (!track) {
// Entity was deleted - mark failure as resolved
await enrichmentFailureService.resolveFailures([
failure.id,
]);
skipped++;
continue;
}
// Reset audio analysis status
await prisma.track.update({
where: { id: failure.entityId },
data: {
analysisStatus: "pending",
analysisRetryCount: 0,
},
});
queued++;
}
} catch (error) {
logger.error(
`Failed to reset ${failure.entityType} ${failure.entityId}:`,
error
);
// Don't re-throw - continue processing other failures
}
}
res.json({
message: `Queued ${queued} items for retry, ${skipped} skipped (entities no longer exist)`,
queued,
skipped,
});
} catch (error: any) {
logger.error("Retry failures error:", error);
res.status(500).json({
error: error.message || "Failed to retry failures",
});
}
});
/**
* POST /enrichment/skip
* Skip specific failures (won't retry automatically)
*/
router.post("/skip", requireAdmin, async (req, res) => {
try {
const { ids } = req.body;
if (!ids || !Array.isArray(ids) || ids.length === 0) {
return res
.status(400)
.json({ error: "Must provide array of failure IDs" });
}
const count = await enrichmentFailureService.skipFailures(ids);
res.json({
message: `Skipped ${count} failures`,
count,
});
} catch (error: any) {
logger.error("Skip failures error:", error);
res.status(500).json({
error: error.message || "Failed to skip failures",
});
}
});
/**
* DELETE /enrichment/failures/:id
* Delete a specific failure record
*/
router.delete("/failures/:id", requireAdmin, async (req, res) => {
try {
const count = await enrichmentFailureService.deleteFailures([
req.params.id,
]);
res.json({
message: "Failure deleted",
count,
});
} catch (error: any) {
logger.error("Delete failure error:", error);
res.status(500).json({
error: error.message || "Failed to delete failure",
});
}
});
/**
* PUT /enrichment/artists/:id/metadata
* Update artist metadata manually (non-destructive overrides)
* User edits are stored as overrides; canonical data preserved for API lookups
*/
router.put("/artists/:id/metadata", async (req, res) => {
try {
const { name, bio, genres, mbid, heroUrl } = req.body;
const { name, bio, genres, heroUrl } = req.body;
const updateData: any = {};
if (name) updateData.name = name;
if (bio) updateData.summary = bio;
if (mbid) updateData.mbid = mbid;
if (heroUrl) updateData.heroUrl = heroUrl;
if (genres) updateData.manualGenres = JSON.stringify(genres);
let hasOverrides = false;
// Mark as manually edited
updateData.manuallyEdited = true;
// Map user edits to override fields (non-destructive)
if (name !== undefined) {
updateData.displayName = name;
hasOverrides = true;
}
if (bio !== undefined) {
updateData.userSummary = bio;
hasOverrides = true;
}
if (heroUrl !== undefined) {
updateData.userHeroUrl = heroUrl;
hasOverrides = true;
}
if (genres !== undefined) {
updateData.userGenres = genres;
hasOverrides = true;
}
// Set override flag
if (hasOverrides) {
updateData.hasUserOverrides = true;
}
const { prisma } = await import("../utils/db");
const artist = await prisma.artist.update({
@@ -236,30 +602,56 @@ router.put("/artists/:id/metadata", async (req, res) => {
},
});
// Invalidate Redis cache for artist hero image
try {
await redisClient.del(`hero:${req.params.id}`);
} catch (err) {
logger.warn("Failed to invalidate Redis cache:", err);
}
res.json(artist);
} catch (error: any) {
console.error("Update artist metadata error:", error);
res.status(500).json({ error: error.message || "Failed to update artist" });
logger.error("Update artist metadata error:", error);
res.status(500).json({
error: error.message || "Failed to update artist",
});
}
});
/**
* PUT /library/albums/:id/metadata
* Update album metadata manually
* PUT /enrichment/albums/:id/metadata
* Update album metadata manually (non-destructive overrides)
* User edits are stored as overrides; canonical data preserved for API lookups
*/
router.put("/albums/:id/metadata", async (req, res) => {
try {
const { title, year, genres, rgMbid, coverUrl } = req.body;
const { title, year, genres, coverUrl } = req.body;
const updateData: any = {};
if (title) updateData.title = title;
if (year) updateData.year = parseInt(year);
if (rgMbid) updateData.rgMbid = rgMbid;
if (coverUrl) updateData.coverUrl = coverUrl;
if (genres) updateData.manualGenres = JSON.stringify(genres);
let hasOverrides = false;
// Mark as manually edited
updateData.manuallyEdited = true;
// Map user edits to override fields (non-destructive)
if (title !== undefined) {
updateData.displayTitle = title;
hasOverrides = true;
}
if (year !== undefined) {
updateData.displayYear = parseInt(year);
hasOverrides = true;
}
if (coverUrl !== undefined) {
updateData.userCoverUrl = coverUrl;
hasOverrides = true;
}
if (genres !== undefined) {
updateData.userGenres = genres;
hasOverrides = true;
}
// Set override flag
if (hasOverrides) {
updateData.hasUserOverrides = true;
}
const { prisma } = await import("../utils/db");
const album = await prisma.album.update({
@@ -285,8 +677,348 @@ router.put("/albums/:id/metadata", async (req, res) => {
res.json(album);
} catch (error: any) {
console.error("Update album metadata error:", error);
res.status(500).json({ error: error.message || "Failed to update album" });
logger.error("Update album metadata error:", error);
res.status(500).json({
error: error.message || "Failed to update album",
});
}
});
/**
* PUT /enrichment/tracks/:id/metadata
* Update track metadata manually (non-destructive overrides)
* User edits are stored as overrides; canonical data preserved
*/
router.put("/tracks/:id/metadata", async (req, res) => {
try {
const { title, trackNo } = req.body;
const updateData: any = {};
let hasOverrides = false;
// Map user edits to override fields (non-destructive)
if (title !== undefined) {
updateData.displayTitle = title;
hasOverrides = true;
}
if (trackNo !== undefined) {
updateData.displayTrackNo = parseInt(trackNo);
hasOverrides = true;
}
// Set override flag
if (hasOverrides) {
updateData.hasUserOverrides = true;
}
const { prisma } = await import("../utils/db");
const track = await prisma.track.update({
where: { id: req.params.id },
data: updateData,
include: {
album: {
select: {
id: true,
title: true,
artist: {
select: {
id: true,
name: true,
},
},
},
},
},
});
res.json(track);
} catch (error: any) {
logger.error("Update track metadata error:", error);
res.status(500).json({
error: error.message || "Failed to update track",
});
}
});
/**
* POST /enrichment/artists/:id/reset
* Reset artist metadata to canonical values (clear all user overrides)
*/
router.post("/artists/:id/reset", async (req, res) => {
try {
const { prisma } = await import("../utils/db");
// Check if artist exists first
const existingArtist = await prisma.artist.findUnique({
where: { id: req.params.id },
select: { id: true },
});
if (!existingArtist) {
return res.status(404).json({
error: "Artist not found",
message: "The artist may have been deleted",
});
}
const artist = await prisma.artist.update({
where: { id: req.params.id },
data: {
displayName: null,
userSummary: null,
userHeroUrl: null,
userGenres: [],
hasUserOverrides: false,
},
include: {
albums: {
select: {
id: true,
title: true,
year: true,
coverUrl: true,
},
},
},
});
// Invalidate Redis cache for artist hero image
try {
await redisClient.del(`hero:${req.params.id}`);
} catch (err) {
logger.warn("Failed to invalidate Redis cache:", err);
}
res.json({
message: "Artist metadata reset to original values",
artist,
});
} catch (error: any) {
// Handle P2025 specifically in case of race condition
if (error.code === "P2025") {
return res.status(404).json({
error: "Artist not found",
message: "The artist may have been deleted",
});
}
logger.error("Reset artist metadata error:", error);
res.status(500).json({
error: error.message || "Failed to reset artist metadata",
});
}
});
/**
* POST /enrichment/albums/:id/reset
* Reset album metadata to canonical values (clear all user overrides)
*/
router.post("/albums/:id/reset", async (req, res) => {
try {
const { prisma } = await import("../utils/db");
// Check if album exists first
const existingAlbum = await prisma.album.findUnique({
where: { id: req.params.id },
select: { id: true },
});
if (!existingAlbum) {
return res.status(404).json({
error: "Album not found",
message: "The album may have been deleted",
});
}
const album = await prisma.album.update({
where: { id: req.params.id },
data: {
displayTitle: null,
displayYear: null,
userCoverUrl: null,
userGenres: [],
hasUserOverrides: false,
},
include: {
artist: {
select: {
id: true,
name: true,
},
},
tracks: {
select: {
id: true,
title: true,
trackNo: true,
duration: true,
},
},
},
});
res.json({
message: "Album metadata reset to original values",
album,
});
} catch (error: any) {
// Handle P2025 specifically in case of race condition
if (error.code === "P2025") {
return res.status(404).json({
error: "Album not found",
message: "The album may have been deleted",
});
}
logger.error("Reset album metadata error:", error);
res.status(500).json({
error: error.message || "Failed to reset album metadata",
});
}
});
/**
* POST /enrichment/tracks/:id/reset
* Reset track metadata to canonical values (clear all user overrides)
*/
router.post("/tracks/:id/reset", async (req, res) => {
try {
const { prisma } = await import("../utils/db");
// Check if track exists first
const existingTrack = await prisma.track.findUnique({
where: { id: req.params.id },
select: { id: true },
});
if (!existingTrack) {
return res.status(404).json({
error: "Track not found",
message: "The track may have been deleted",
});
}
const track = await prisma.track.update({
where: { id: req.params.id },
data: {
displayTitle: null,
displayTrackNo: null,
hasUserOverrides: false,
},
include: {
album: {
select: {
id: true,
title: true,
artist: {
select: {
id: true,
name: true,
},
},
},
},
},
});
res.json({
message: "Track metadata reset to original values",
track,
});
} catch (error: any) {
// Handle P2025 specifically in case of race condition
if (error.code === "P2025") {
return res.status(404).json({
error: "Track not found",
message: "The track may have been deleted",
});
}
logger.error("Reset track metadata error:", error);
res.status(500).json({
error: error.message || "Failed to reset track metadata",
});
}
});
/**
* GET /enrichment/concurrency
* Get current enrichment concurrency configuration
*/
router.get("/concurrency", async (req, res) => {
try {
const settings = await getSystemSettings();
const concurrency = settings?.enrichmentConcurrency || 1;
// Calculate estimated speeds based on concurrency
const artistsPerMin = Math.round(10 * concurrency);
const tracksPerMin = Math.round(60 * concurrency);
res.json({
concurrency,
estimatedSpeed: `~${artistsPerMin} artists/min, ~${tracksPerMin} tracks/min`,
artistsPerMin,
tracksPerMin,
});
} catch (error) {
logger.error("Failed to get enrichment settings:", error);
res.status(500).json({ error: "Failed to get enrichment settings" });
}
});
/**
* PUT /enrichment/concurrency
* Update enrichment concurrency configuration
*/
router.put("/concurrency", requireAdmin, async (req, res) => {
try {
const { concurrency } = req.body;
if (!concurrency || typeof concurrency !== "number") {
return res
.status(400)
.json({ error: "Missing or invalid 'concurrency' parameter" });
}
// Clamp concurrency to 1-5
const clampedConcurrency = Math.max(
1,
Math.min(5, Math.floor(concurrency))
);
// Update system settings in database
const { prisma } = await import("../utils/db");
await prisma.systemSettings.upsert({
where: { id: "default" },
create: {
id: "default",
enrichmentConcurrency: clampedConcurrency,
},
update: {
enrichmentConcurrency: clampedConcurrency,
},
});
// Invalidate cache so next read gets fresh value
invalidateSystemSettingsCache();
// Update rate limiter concurrency multiplier
rateLimiter.updateConcurrencyMultiplier(clampedConcurrency);
// Calculate estimated speeds
const artistsPerMin = Math.round(10 * clampedConcurrency);
const tracksPerMin = Math.round(60 * clampedConcurrency);
logger.debug(
`[Enrichment Settings] Updated concurrency to ${clampedConcurrency}`
);
res.json({
concurrency: clampedConcurrency,
estimatedSpeed: `~${artistsPerMin} artists/min, ~${tracksPerMin} tracks/min`,
artistsPerMin,
tracksPerMin,
});
} catch (error) {
logger.error("Failed to update enrichment settings:", error);
res.status(500).json({ error: "Failed to update enrichment settings" });
}
});

View File

@@ -1,6 +1,7 @@
import { Router } from "express";
import { logger } from "../utils/logger";
import { requireAuthOrToken } from "../middleware/auth";
import { prisma } from "../utils/db";
import { prisma, Prisma } from "../utils/db";
import { redisClient } from "../utils/redis";
const router = Router();
@@ -22,14 +23,14 @@ router.get("/genres", async (req, res) => {
try {
const cached = await redisClient.get(cacheKey);
if (cached) {
console.log(`[HOMEPAGE] Cache HIT for genres`);
logger.debug(`[HOMEPAGE] Cache HIT for genres`);
return res.json(JSON.parse(cached));
}
} catch (cacheError) {
console.warn("[HOMEPAGE] Redis cache read error:", cacheError);
logger.warn("[HOMEPAGE] Redis cache read error:", cacheError);
}
console.log(
logger.debug(
`[HOMEPAGE] ✗ Cache MISS for genres, fetching from database...`
);
@@ -37,7 +38,7 @@ router.get("/genres", async (req, res) => {
const albums = await prisma.album.findMany({
where: {
genres: {
isEmpty: false, // Only albums with genres
not: Prisma.JsonNull, // Only albums with genres (not null)
},
location: "LIBRARY", // Exclude discovery albums
},
@@ -60,8 +61,11 @@ router.get("/genres", async (req, res) => {
// Count genre occurrences
const genreCounts = new Map<string, number>();
for (const album of albums) {
for (const genre of album.genres) {
genreCounts.set(genre, (genreCounts.get(genre) || 0) + 1);
const genres = album.genres as string[];
if (genres && Array.isArray(genres)) {
for (const genre of genres) {
genreCounts.set(genre, (genreCounts.get(genre) || 0) + 1);
}
}
}
@@ -71,12 +75,15 @@ router.get("/genres", async (req, res) => {
.slice(0, limitNum)
.map(([genre]) => genre);
console.log(`[HOMEPAGE] Top genres: ${topGenres.join(", ")}`);
logger.debug(`[HOMEPAGE] Top genres: ${topGenres.join(", ")}`);
// For each top genre, get sample albums (up to 10)
const genresWithAlbums = topGenres.map((genre) => {
const genreAlbums = albums
.filter((a) => a.genres.includes(genre))
.filter((a) => {
const genres = a.genres as string[];
return genres && Array.isArray(genres) && genres.includes(genre);
})
.slice(0, 10)
.map((a) => ({
id: a.id,
@@ -103,14 +110,14 @@ router.get("/genres", async (req, res) => {
24 * 60 * 60,
JSON.stringify(genresWithAlbums)
);
console.log(`[HOMEPAGE] Cached genres for 24 hours`);
logger.debug(`[HOMEPAGE] Cached genres for 24 hours`);
} catch (cacheError) {
console.warn("[HOMEPAGE] Redis cache write error:", cacheError);
logger.warn("[HOMEPAGE] Redis cache write error:", cacheError);
}
res.json(genresWithAlbums);
} catch (error) {
console.error("Get homepage genres error:", error);
logger.error("Get homepage genres error:", error);
res.status(500).json({ error: "Failed to fetch genres" });
}
});
@@ -129,14 +136,14 @@ router.get("/top-podcasts", async (req, res) => {
try {
const cached = await redisClient.get(cacheKey);
if (cached) {
console.log(`[HOMEPAGE] Cache HIT for top podcasts`);
logger.debug(`[HOMEPAGE] Cache HIT for top podcasts`);
return res.json(JSON.parse(cached));
}
} catch (cacheError) {
console.warn("[HOMEPAGE] Redis cache read error:", cacheError);
logger.warn("[HOMEPAGE] Redis cache read error:", cacheError);
}
console.log(
logger.debug(
`[HOMEPAGE] ✗ Cache MISS for top podcasts, fetching from database...`
);
@@ -172,14 +179,14 @@ router.get("/top-podcasts", async (req, res) => {
24 * 60 * 60,
JSON.stringify(result)
);
console.log(`[HOMEPAGE] Cached top podcasts for 24 hours`);
logger.debug(`[HOMEPAGE] Cached top podcasts for 24 hours`);
} catch (cacheError) {
console.warn("[HOMEPAGE] Redis cache write error:", cacheError);
logger.warn("[HOMEPAGE] Redis cache write error:", cacheError);
}
res.json(result);
} catch (error) {
console.error("Get top podcasts error:", error);
logger.error("Get top podcasts error:", error);
res.status(500).json({ error: "Failed to fetch top podcasts" });
}
});

File diff suppressed because it is too large Load Diff

View File

@@ -1,4 +1,5 @@
import { Router } from "express";
import { logger } from "../utils/logger";
import { requireAuth } from "../middleware/auth";
import { prisma } from "../utils/db";
import { z } from "zod";
@@ -46,7 +47,7 @@ router.post("/", async (req, res) => {
.status(400)
.json({ error: "Invalid request", details: error.errors });
}
console.error("Update listening state error:", error);
logger.error("Update listening state error:", error);
res.status(500).json({ error: "Failed to update listening state" });
}
});
@@ -79,7 +80,7 @@ router.get("/", async (req, res) => {
res.json(state);
} catch (error) {
console.error("Get listening state error:", error);
logger.error("Get listening state error:", error);
res.status(500).json({ error: "Failed to get listening state" });
}
});
@@ -98,7 +99,7 @@ router.get("/recent", async (req, res) => {
res.json(states);
} catch (error) {
console.error("Get recent listening states error:", error);
logger.error("Get recent listening states error:", error);
res.status(500).json({
error: "Failed to get recent listening states",
});

View File

@@ -1,5 +1,6 @@
import { Router } from "express";
import { requireAuthOrToken } from "../middleware/auth";
import { logger } from "../utils/logger";
import { requireAuthOrToken, requireAdmin } from "../middleware/auth";
import { programmaticPlaylistService } from "../services/programmaticPlaylists";
import {
moodBucketService,
@@ -93,7 +94,7 @@ router.get("/", async (req, res) => {
res.json(mixes);
} catch (error) {
console.error("Get mixes error:", error);
logger.error("Get mixes error:", error);
res.status(500).json({ error: "Failed to get mixes" });
}
});
@@ -252,7 +253,7 @@ router.post("/mood", async (req, res) => {
.map((id: string) => tracks.find((t) => t.id === id))
.filter((t: any) => t !== undefined);
console.log(
logger.debug(
`[MIXES] Generated mood-on-demand mix with ${mix.trackCount} tracks`
);
@@ -261,7 +262,7 @@ router.post("/mood", async (req, res) => {
tracks: orderedTracks,
});
} catch (error) {
console.error("Generate mood mix error:", error);
logger.error("Generate mood mix error:", error);
res.status(500).json({ error: "Failed to generate mood mix" });
}
});
@@ -430,11 +431,11 @@ router.post("/mood/save-preferences", async (req, res) => {
const cacheKey = `mixes:${userId}`;
await redisClient.del(cacheKey);
console.log(`[MIXES] Saved mood mix preferences for user ${userId}`);
logger.debug(`[MIXES] Saved mood mix preferences for user ${userId}`);
res.json({ success: true, message: "Mood preferences saved" });
} catch (error) {
console.error("Save mood preferences error:", error);
logger.error("Save mood preferences error:", error);
res.status(500).json({ error: "Failed to save mood preferences" });
}
});
@@ -462,7 +463,7 @@ router.get("/mood/buckets/presets", async (req, res) => {
const presets = await moodBucketService.getMoodPresets();
res.json(presets);
} catch (error) {
console.error("Get mood presets error:", error);
logger.error("Get mood presets error:", error);
res.status(500).json({ error: "Failed to get mood presets" });
}
});
@@ -535,7 +536,7 @@ router.get("/mood/buckets/:mood", async (req, res) => {
tracks: orderedTracks,
});
} catch (error) {
console.error("Get mood bucket mix error:", error);
logger.error("Get mood bucket mix error:", error);
res.status(500).json({ error: "Failed to get mood mix" });
}
});
@@ -611,7 +612,7 @@ router.post("/mood/buckets/:mood/save", async (req, res) => {
.map((id: string) => tracks.find((t) => t.id === id))
.filter((t: any) => t !== undefined);
console.log(
logger.debug(
`[MIXES] Saved mood bucket mix for user ${userId}: ${mood} (${savedMix.trackCount} tracks)`
);
@@ -623,7 +624,7 @@ router.post("/mood/buckets/:mood/save", async (req, res) => {
},
});
} catch (error) {
console.error("Save mood bucket mix error:", error);
logger.error("Save mood bucket mix error:", error);
res.status(500).json({ error: "Failed to save mood mix" });
}
});
@@ -642,15 +643,14 @@ router.post("/mood/buckets/:mood/save", async (req, res) => {
* 200:
* description: Backfill completed
*/
router.post("/mood/buckets/backfill", async (req, res) => {
router.post("/mood/buckets/backfill", requireAdmin, async (req, res) => {
try {
const userId = getRequestUserId(req);
if (!userId) {
return res.status(401).json({ error: "Not authenticated" });
}
// TODO: Add admin check
console.log(
logger.debug(
`[MIXES] Starting mood bucket backfill requested by user ${userId}`
);
@@ -662,7 +662,7 @@ router.post("/mood/buckets/backfill", async (req, res) => {
assigned: result.assigned,
});
} catch (error) {
console.error("Backfill mood buckets error:", error);
logger.error("Backfill mood buckets error:", error);
res.status(500).json({ error: "Failed to backfill mood buckets" });
}
});
@@ -721,7 +721,7 @@ router.post("/refresh", async (req, res) => {
res.json({ message: "Mixes refreshed", mixes });
} catch (error) {
console.error("Refresh mixes error:", error);
logger.error("Refresh mixes error:", error);
res.status(500).json({ error: "Failed to refresh mixes" });
}
});
@@ -849,7 +849,7 @@ router.post("/:id/save", async (req, res) => {
data: playlistItems,
});
console.log(
logger.debug(
`[MIXES] Saved mix ${mixId} as playlist ${playlist.id} (${mix.trackIds.length} tracks)`
);
@@ -859,7 +859,7 @@ router.post("/:id/save", async (req, res) => {
trackCount: mix.trackIds.length,
});
} catch (error) {
console.error("Save mix as playlist error:", error);
logger.error("Save mix as playlist error:", error);
res.status(500).json({ error: "Failed to save mix as playlist" });
}
});
@@ -982,7 +982,7 @@ router.get("/:id", async (req, res) => {
tracks: orderedTracks,
});
} catch (error) {
console.error("Get mix error:", error);
logger.error("Get mix error:", error);
res.status(500).json({ error: "Failed to get mix" });
}
});

View File

@@ -1,6 +1,7 @@
import { Router, Response } from "express";
import { Router, Request, Response } from "express";
import { logger } from "../utils/logger";
import { notificationService } from "../services/notificationService";
import { AuthenticatedRequest, requireAuth } from "../middleware/auth";
import { requireAuth } from "../middleware/auth";
import { prisma } from "../utils/db";
const router = Router();
@@ -12,9 +13,9 @@ const router = Router();
router.get(
"/",
requireAuth,
async (req: AuthenticatedRequest, res: Response) => {
async (req: Request, res: Response) => {
try {
console.log(
logger.debug(
`[Notifications] Fetching notifications for user ${
req.user!.id
}`
@@ -22,12 +23,12 @@ router.get(
const notifications = await notificationService.getForUser(
req.user!.id
);
console.log(
logger.debug(
`[Notifications] Found ${notifications.length} notifications`
);
res.json(notifications);
} catch (error: any) {
console.error("Error fetching notifications:", error);
logger.error("Error fetching notifications:", error);
res.status(500).json({ error: "Failed to fetch notifications" });
}
}
@@ -40,14 +41,14 @@ router.get(
router.get(
"/unread-count",
requireAuth,
async (req: AuthenticatedRequest, res: Response) => {
async (req: Request, res: Response) => {
try {
const count = await notificationService.getUnreadCount(
req.user!.id
);
res.json({ count });
} catch (error: any) {
console.error("Error fetching unread count:", error);
logger.error("Error fetching unread count:", error);
res.status(500).json({ error: "Failed to fetch unread count" });
}
}
@@ -60,12 +61,12 @@ router.get(
router.post(
"/:id/read",
requireAuth,
async (req: AuthenticatedRequest, res: Response) => {
async (req: Request, res: Response) => {
try {
await notificationService.markAsRead(req.params.id, req.user!.id);
res.json({ success: true });
} catch (error: any) {
console.error("Error marking notification as read:", error);
logger.error("Error marking notification as read:", error);
res.status(500).json({
error: "Failed to mark notification as read",
});
@@ -80,12 +81,12 @@ router.post(
router.post(
"/read-all",
requireAuth,
async (req: AuthenticatedRequest, res: Response) => {
async (req: Request, res: Response) => {
try {
await notificationService.markAllAsRead(req.user!.id);
res.json({ success: true });
} catch (error: any) {
console.error("Error marking all notifications as read:", error);
logger.error("Error marking all notifications as read:", error);
res.status(500).json({
error: "Failed to mark all notifications as read",
});
@@ -100,12 +101,12 @@ router.post(
router.post(
"/:id/clear",
requireAuth,
async (req: AuthenticatedRequest, res: Response) => {
async (req: Request, res: Response) => {
try {
await notificationService.clear(req.params.id, req.user!.id);
res.json({ success: true });
} catch (error: any) {
console.error("Error clearing notification:", error);
logger.error("Error clearing notification:", error);
res.status(500).json({ error: "Failed to clear notification" });
}
}
@@ -118,12 +119,12 @@ router.post(
router.post(
"/clear-all",
requireAuth,
async (req: AuthenticatedRequest, res: Response) => {
async (req: Request, res: Response) => {
try {
await notificationService.clearAll(req.user!.id);
res.json({ success: true });
} catch (error: any) {
console.error("Error clearing all notifications:", error);
logger.error("Error clearing all notifications:", error);
res.status(500).json({
error: "Failed to clear all notifications",
});
@@ -138,11 +139,12 @@ router.post(
/**
* GET /notifications/downloads/history
* Get completed/failed downloads that haven't been cleared
* Deduplicated by album subject (shows only most recent entry per album)
*/
router.get(
"/downloads/history",
requireAuth,
async (req: AuthenticatedRequest, res: Response) => {
async (req: Request, res: Response) => {
try {
const downloads = await prisma.downloadJob.findMany({
where: {
@@ -151,11 +153,23 @@ router.get(
cleared: false,
},
orderBy: { updatedAt: "desc" },
take: 50,
take: 100, // Fetch more to account for duplicates
});
res.json(downloads);
// Deduplicate by subject - keep only the most recent entry per album
const seen = new Set<string>();
const deduplicated = downloads.filter((download) => {
if (seen.has(download.subject)) {
return false; // Skip duplicate
}
seen.add(download.subject);
return true; // Keep first occurrence (most recent due to ordering)
});
// Return top 50 after deduplication
res.json(deduplicated.slice(0, 50));
} catch (error: any) {
console.error("Error fetching download history:", error);
logger.error("Error fetching download history:", error);
res.status(500).json({ error: "Failed to fetch download history" });
}
}
@@ -168,7 +182,7 @@ router.get(
router.get(
"/downloads/active",
requireAuth,
async (req: AuthenticatedRequest, res: Response) => {
async (req: Request, res: Response) => {
try {
const downloads = await prisma.downloadJob.findMany({
where: {
@@ -179,7 +193,7 @@ router.get(
});
res.json(downloads);
} catch (error: any) {
console.error("Error fetching active downloads:", error);
logger.error("Error fetching active downloads:", error);
res.status(500).json({ error: "Failed to fetch active downloads" });
}
}
@@ -192,7 +206,7 @@ router.get(
router.post(
"/downloads/:id/clear",
requireAuth,
async (req: AuthenticatedRequest, res: Response) => {
async (req: Request, res: Response) => {
try {
await prisma.downloadJob.updateMany({
where: {
@@ -203,7 +217,7 @@ router.post(
});
res.json({ success: true });
} catch (error: any) {
console.error("Error clearing download:", error);
logger.error("Error clearing download:", error);
res.status(500).json({ error: "Failed to clear download" });
}
}
@@ -216,7 +230,7 @@ router.post(
router.post(
"/downloads/clear-all",
requireAuth,
async (req: AuthenticatedRequest, res: Response) => {
async (req: Request, res: Response) => {
try {
await prisma.downloadJob.updateMany({
where: {
@@ -228,7 +242,7 @@ router.post(
});
res.json({ success: true });
} catch (error: any) {
console.error("Error clearing all downloads:", error);
logger.error("Error clearing all downloads:", error);
res.status(500).json({ error: "Failed to clear all downloads" });
}
}
@@ -241,7 +255,7 @@ router.post(
router.post(
"/downloads/:id/retry",
requireAuth,
async (req: AuthenticatedRequest, res: Response) => {
async (req: Request, res: Response) => {
try {
// Get the failed download
const failedJob = await prisma.downloadJob.findFirst({
@@ -478,11 +492,9 @@ router.post(
const albumTitle = metadata.albumTitle as string;
if (!artistName || !albumTitle) {
return res
.status(400)
.json({
error: "Cannot retry: missing artist/album info",
});
return res.status(400).json({
error: "Cannot retry: missing artist/album info",
});
}
// Mark old job as cleared
@@ -546,13 +558,13 @@ router.post(
},
];
console.log(
logger.debug(
`[Retry] Trying Soulseek for ${artistName} - ${albumTitle}`
);
// Run Soulseek search async
soulseekService
.searchAndDownloadBatch(tracks, musicPath, 4)
.searchAndDownloadBatch(tracks, musicPath, settings?.soulseekConcurrentDownloads || 4)
.then(async (result) => {
if (result.successful > 0) {
await prisma.downloadJob.update({
@@ -569,7 +581,7 @@ router.post(
},
},
});
console.log(
logger.debug(
`[Retry] ✓ Soulseek downloaded ${result.successful} tracks for ${artistName} - ${albumTitle}`
);
@@ -585,7 +597,7 @@ router.post(
});
} else {
// Soulseek failed, try Lidarr if we have an MBID
console.log(
logger.debug(
`[Retry] Soulseek failed, trying Lidarr for ${artistName} - ${albumTitle}`
);
@@ -631,7 +643,7 @@ router.post(
}
})
.catch(async (error) => {
console.error(`[Retry] Soulseek error:`, error);
logger.error(`[Retry] Soulseek error:`, error);
await prisma.downloadJob.update({
where: { id: newJobRecord.id },
data: {
@@ -676,7 +688,7 @@ router.post(
artistMbid: failedJob.artistMbid,
subject: failedJob.subject,
status: "pending",
metadata: metadata || {},
metadata: (metadata || {}) as any,
},
});
@@ -702,7 +714,7 @@ router.post(
error: result.error,
});
} catch (error: any) {
console.error("Error retrying download:", error);
logger.error("Error retrying download:", error);
res.status(500).json({ error: "Failed to retry download" });
}
}

View File

@@ -1,4 +1,5 @@
import { Router } from "express";
import { logger } from "../utils/logger";
import { requireAuth } from "../middleware/auth";
import { prisma } from "../utils/db";
import { z } from "zod";
@@ -19,12 +20,12 @@ router.post("/albums/:id/download", async (req, res) => {
const { quality } = downloadAlbumSchema.parse(req.body);
// Get user's default quality if not specified
let selectedQuality = quality;
if (!selectedQuality) {
let selectedQuality: "original" | "high" | "medium" | "low" = quality || "medium";
if (!quality) {
const settings = await prisma.userSettings.findUnique({
where: { userId },
});
selectedQuality = (settings?.playbackQuality as any) || "medium";
selectedQuality = (settings?.playbackQuality as "original" | "high" | "medium" | "low") || "medium";
}
// Get album with tracks
@@ -103,7 +104,7 @@ router.post("/albums/:id/download", async (req, res) => {
.status(400)
.json({ error: "Invalid request", details: error.errors });
}
console.error("Create download job error:", error);
logger.error("Create download job error:", error);
res.status(500).json({ error: "Failed to create download job" });
}
});
@@ -145,7 +146,7 @@ router.post("/tracks/:id/complete", async (req, res) => {
res.json(cachedTrack);
} catch (error) {
console.error("Complete track download error:", error);
logger.error("Complete track download error:", error);
res.status(500).json({ error: "Failed to complete download" });
}
});
@@ -209,7 +210,7 @@ router.get("/albums", async (req, res) => {
res.json(albums);
} catch (error) {
console.error("Get cached albums error:", error);
logger.error("Get cached albums error:", error);
res.status(500).json({ error: "Failed to get cached albums" });
}
});
@@ -245,7 +246,7 @@ router.delete("/albums/:id", async (req, res) => {
deletedCount: cachedTracks.length,
});
} catch (error) {
console.error("Delete cached album error:", error);
logger.error("Delete cached album error:", error);
res.status(500).json({ error: "Failed to delete cached album" });
}
});
@@ -278,7 +279,7 @@ router.get("/stats", async (req, res) => {
trackCount,
});
} catch (error) {
console.error("Get cache stats error:", error);
logger.error("Get cache stats error:", error);
res.status(500).json({ error: "Failed to get cache stats" });
}
});

View File

@@ -1,4 +1,5 @@
import { Router } from "express";
import { logger } from "../utils/logger";
import { prisma } from "../utils/db";
import bcrypt from "bcrypt";
import { z } from "zod";
@@ -49,14 +50,14 @@ async function ensureEncryptionKey(): Promise<void> {
process.env.SETTINGS_ENCRYPTION_KEY !==
"default-encryption-key-change-me"
) {
console.log("[ONBOARDING] Encryption key already exists");
logger.debug("[ONBOARDING] Encryption key already exists");
return;
}
// Generate a secure 32-byte encryption key
const encryptionKey = crypto.randomBytes(32).toString("base64");
console.log(
logger.debug(
"[ONBOARDING] Generating encryption key for settings security..."
);
@@ -69,9 +70,9 @@ async function ensureEncryptionKey(): Promise<void> {
// Update the process environment so it's available immediately
process.env.SETTINGS_ENCRYPTION_KEY = encryptionKey;
console.log("[ONBOARDING] Encryption key generated and saved to .env");
logger.debug("[ONBOARDING] Encryption key generated and saved to .env");
} catch (error) {
console.error("[ONBOARDING] Failed to save encryption key:", error);
logger.error("[ONBOARDING] Failed to save encryption key:", error);
throw new Error("Failed to generate encryption key");
}
}
@@ -82,7 +83,7 @@ async function ensureEncryptionKey(): Promise<void> {
*/
router.post("/register", async (req, res) => {
try {
console.log("[ONBOARDING] Register attempt for user:", req.body?.username);
logger.debug("[ONBOARDING] Register attempt for user:", req.body?.username);
const { username, password } = registerSchema.parse(req.body);
// Check if any user exists (first user becomes admin)
@@ -100,7 +101,7 @@ router.post("/register", async (req, res) => {
});
if (existing) {
console.log("[ONBOARDING] Username already taken:", username);
logger.debug("[ONBOARDING] Username already taken:", username);
return res.status(400).json({ error: "Username already taken" });
}
@@ -131,9 +132,10 @@ router.post("/register", async (req, res) => {
id: user.id,
username: user.username,
role: user.role,
tokenVersion: user.tokenVersion,
});
console.log("[ONBOARDING] User created successfully:", user.username);
logger.debug("[ONBOARDING] User created successfully:", user.username);
res.json({
token,
user: {
@@ -145,12 +147,12 @@ router.post("/register", async (req, res) => {
});
} catch (err: any) {
if (err instanceof z.ZodError) {
console.error("[ONBOARDING] Validation error:", err.errors);
logger.error("[ONBOARDING] Validation error:", err.errors);
return res
.status(400)
.json({ error: "Invalid request", details: err.errors });
}
console.error("Registration error:", err);
logger.error("Registration error:", err);
res.status(500).json({ error: "Failed to create account" });
}
});
@@ -189,10 +191,10 @@ router.post("/lidarr", requireAuth, async (req, res) => {
if (response.status === 200) {
connectionTested = true;
console.log("Lidarr connection test successful");
logger.debug("Lidarr connection test successful");
}
} catch (error: any) {
console.warn(
logger.warn(
" Lidarr connection test failed (saved anyway):",
error.message
);
@@ -229,7 +231,7 @@ router.post("/lidarr", requireAuth, async (req, res) => {
.status(400)
.json({ error: "Invalid request", details: err.errors });
}
console.error("Lidarr config error:", err);
logger.error("Lidarr config error:", err);
res.status(500).json({ error: "Failed to save configuration" });
}
});
@@ -265,10 +267,10 @@ router.post("/audiobookshelf", requireAuth, async (req, res) => {
if (response.status === 200) {
connectionTested = true;
console.log("Audiobookshelf connection test successful");
logger.debug("Audiobookshelf connection test successful");
}
} catch (error: any) {
console.warn(
logger.warn(
" Audiobookshelf connection test failed (saved anyway):",
error.message
);
@@ -305,7 +307,7 @@ router.post("/audiobookshelf", requireAuth, async (req, res) => {
.status(400)
.json({ error: "Invalid request", details: err.errors });
}
console.error("Audiobookshelf config error:", err);
logger.error("Audiobookshelf config error:", err);
res.status(500).json({ error: "Failed to save configuration" });
}
});
@@ -363,7 +365,7 @@ router.post("/soulseek", requireAuth, async (req, res) => {
.status(400)
.json({ error: "Invalid request", details: err.errors });
}
console.error("Soulseek config error:", err);
logger.error("Soulseek config error:", err);
res.status(500).json({ error: "Failed to save configuration" });
}
});
@@ -394,7 +396,7 @@ router.post("/enrichment", requireAuth, async (req, res) => {
.status(400)
.json({ error: "Invalid request", details: err.errors });
}
console.error("Enrichment config error:", err);
logger.error("Enrichment config error:", err);
res.status(500).json({ error: "Failed to save configuration" });
}
});
@@ -410,10 +412,10 @@ router.post("/complete", requireAuth, async (req, res) => {
data: { onboardingComplete: true },
});
console.log("[ONBOARDING] User completed onboarding:", req.user!.id);
logger.debug("[ONBOARDING] User completed onboarding:", req.user!.id);
res.json({ success: true });
} catch (err: any) {
console.error("Onboarding complete error:", err);
logger.error("Onboarding complete error:", err);
res.status(500).json({ error: "Failed to complete onboarding" });
}
});
@@ -467,7 +469,7 @@ router.get("/status", async (req, res) => {
});
}
} catch (err: any) {
console.error("Onboarding status error:", err);
logger.error("Onboarding status error:", err);
res.status(500).json({ error: "Failed to check status" });
}
});

View File

@@ -1,5 +1,6 @@
import express from "express";
import { prisma } from "../utils/db";
import { logger } from "../utils/logger";
import { prisma, Prisma } from "../utils/db";
import { requireAuth } from "../middleware/auth";
const router = express.Router();
@@ -19,7 +20,7 @@ router.get("/", requireAuth, async (req, res) => {
res.json(playbackState);
} catch (error) {
console.error("Get playback state error:", error);
logger.error("Get playback state error:", error);
res.status(500).json({ error: "Failed to get playback state" });
}
});
@@ -46,7 +47,7 @@ router.post("/", requireAuth, async (req, res) => {
// Validate playback type
const validPlaybackTypes = ["track", "audiobook", "podcast"];
if (!validPlaybackTypes.includes(playbackType)) {
console.warn(`[PlaybackState] Invalid playbackType: ${playbackType}`);
logger.warn(`[PlaybackState] Invalid playbackType: ${playbackType}`);
return res.status(400).json({ error: "Invalid playbackType" });
}
@@ -79,7 +80,7 @@ router.post("/", requireAuth, async (req, res) => {
safeQueue = null;
}
} catch (sanitizeError: any) {
console.error("[PlaybackState] Queue sanitization failed:", sanitizeError?.message);
logger.error("[PlaybackState] Queue sanitization failed:", sanitizeError?.message);
safeQueue = null; // Fall back to null queue
}
}
@@ -96,7 +97,7 @@ router.post("/", requireAuth, async (req, res) => {
trackId: trackId || null,
audiobookId: audiobookId || null,
podcastId: podcastId || null,
queue: safeQueue,
queue: safeQueue === null ? Prisma.DbNull : safeQueue,
currentIndex: safeCurrentIndex,
isShuffle: isShuffle || false,
},
@@ -106,7 +107,7 @@ router.post("/", requireAuth, async (req, res) => {
trackId: trackId || null,
audiobookId: audiobookId || null,
podcastId: podcastId || null,
queue: safeQueue,
queue: safeQueue === null ? Prisma.DbNull : safeQueue,
currentIndex: safeCurrentIndex,
isShuffle: isShuffle || false,
},
@@ -114,13 +115,13 @@ router.post("/", requireAuth, async (req, res) => {
res.json(playbackState);
} catch (error: any) {
console.error("[PlaybackState] Error saving state:", error?.message || error);
console.error("[PlaybackState] Full error:", JSON.stringify(error, Object.getOwnPropertyNames(error), 2));
logger.error("[PlaybackState] Error saving state:", error?.message || error);
logger.error("[PlaybackState] Full error:", JSON.stringify(error, Object.getOwnPropertyNames(error), 2));
if (error?.code) {
console.error("[PlaybackState] Error code:", error.code);
logger.error("[PlaybackState] Error code:", error.code);
}
if (error?.meta) {
console.error("[PlaybackState] Prisma meta:", error.meta);
logger.error("[PlaybackState] Prisma meta:", error.meta);
}
// Return more specific error for debugging
res.status(500).json({
@@ -141,7 +142,7 @@ router.delete("/", requireAuth, async (req, res) => {
res.json({ success: true });
} catch (error) {
console.error("Delete playback state error:", error);
logger.error("Delete playback state error:", error);
res.status(500).json({ error: "Failed to delete playback state" });
}
});

View File

@@ -1,7 +1,8 @@
import { Router } from "express";
import { logger } from "../utils/logger";
import { z } from "zod";
import { requireAuthOrToken } from "../middleware/auth";
import { prisma } from "../utils/db";
import { z } from "zod";
import { sessionLog } from "../utils/playlistLogger";
const router = Router();
@@ -20,6 +21,9 @@ const addTrackSchema = z.object({
// GET /playlists
router.get("/", async (req, res) => {
try {
if (!req.user) {
return res.status(401).json({ error: "Unauthorized" });
}
const userId = req.user.id;
// Get user's hidden playlists
@@ -74,11 +78,11 @@ router.get("/", async (req, res) => {
// Debug: log shared playlists with user info
const sharedPlaylists = playlistsWithCounts.filter((p) => !p.isOwner);
if (sharedPlaylists.length > 0) {
console.log(
logger.debug(
`[Playlists] Found ${sharedPlaylists.length} shared playlists for user ${userId}:`
);
sharedPlaylists.forEach((p) => {
console.log(
logger.debug(
` - "${p.name}" by ${
p.user?.username || "UNKNOWN"
} (owner: ${p.userId})`
@@ -88,7 +92,7 @@ router.get("/", async (req, res) => {
res.json(playlistsWithCounts);
} catch (error) {
console.error("Get playlists error:", error);
logger.error("Get playlists error:", error);
res.status(500).json({ error: "Failed to get playlists" });
}
});
@@ -96,6 +100,9 @@ router.get("/", async (req, res) => {
// POST /playlists
router.post("/", async (req, res) => {
try {
if (!req.user) {
return res.status(401).json({ error: "Unauthorized" });
}
const userId = req.user.id;
const data = createPlaylistSchema.parse(req.body);
@@ -114,7 +121,7 @@ router.post("/", async (req, res) => {
.status(400)
.json({ error: "Invalid request", details: error.errors });
}
console.error("Create playlist error:", error);
logger.error("Create playlist error:", error);
res.status(500).json({ error: "Failed to create playlist" });
}
});
@@ -122,6 +129,9 @@ router.post("/", async (req, res) => {
// GET /playlists/:id
router.get("/:id", async (req, res) => {
try {
if (!req.user) {
return res.status(401).json({ error: "Unauthorized" });
}
const userId = req.user.id;
const playlist = await prisma.playlist.findUnique({
@@ -132,6 +142,10 @@ router.get("/:id", async (req, res) => {
username: true,
},
},
hiddenByUsers: {
where: { userId },
select: { id: true },
},
items: {
include: {
track: {
@@ -203,6 +217,7 @@ router.get("/:id", async (req, res) => {
res.json({
...playlist,
isOwner: playlist.userId === userId,
isHidden: playlist.hiddenByUsers.length > 0,
trackCount: playlist.items.length,
pendingCount: playlist.pendingTracks.length,
items: formattedItems,
@@ -210,7 +225,7 @@ router.get("/:id", async (req, res) => {
mergedItems,
});
} catch (error) {
console.error("Get playlist error:", error);
logger.error("Get playlist error:", error);
res.status(500).json({ error: "Failed to get playlist" });
}
});
@@ -218,6 +233,9 @@ router.get("/:id", async (req, res) => {
// PUT /playlists/:id
router.put("/:id", async (req, res) => {
try {
if (!req.user) {
return res.status(401).json({ error: "Unauthorized" });
}
const userId = req.user.id;
const data = createPlaylistSchema.parse(req.body);
@@ -249,7 +267,7 @@ router.put("/:id", async (req, res) => {
.status(400)
.json({ error: "Invalid request", details: error.errors });
}
console.error("Update playlist error:", error);
logger.error("Update playlist error:", error);
res.status(500).json({ error: "Failed to update playlist" });
}
});
@@ -257,6 +275,9 @@ router.put("/:id", async (req, res) => {
// POST /playlists/:id/hide - Hide any playlist from your view
router.post("/:id/hide", async (req, res) => {
try {
if (!req.user) {
return res.status(401).json({ error: "Unauthorized" });
}
const userId = req.user.id;
const playlistId = req.params.id;
@@ -285,7 +306,7 @@ router.post("/:id/hide", async (req, res) => {
res.json({ message: "Playlist hidden", isHidden: true });
} catch (error) {
console.error("Hide playlist error:", error);
logger.error("Hide playlist error:", error);
res.status(500).json({ error: "Failed to hide playlist" });
}
});
@@ -293,6 +314,9 @@ router.post("/:id/hide", async (req, res) => {
// DELETE /playlists/:id/hide - Unhide a shared playlist
router.delete("/:id/hide", async (req, res) => {
try {
if (!req.user) {
return res.status(401).json({ error: "Unauthorized" });
}
const userId = req.user.id;
const playlistId = req.params.id;
@@ -303,7 +327,7 @@ router.delete("/:id/hide", async (req, res) => {
res.json({ message: "Playlist unhidden", isHidden: false });
} catch (error) {
console.error("Unhide playlist error:", error);
logger.error("Unhide playlist error:", error);
res.status(500).json({ error: "Failed to unhide playlist" });
}
});
@@ -311,6 +335,9 @@ router.delete("/:id/hide", async (req, res) => {
// DELETE /playlists/:id
router.delete("/:id", async (req, res) => {
try {
if (!req.user) {
return res.status(401).json({ error: "Unauthorized" });
}
const userId = req.user.id;
// Check ownership
@@ -332,7 +359,7 @@ router.delete("/:id", async (req, res) => {
res.json({ message: "Playlist deleted" });
} catch (error) {
console.error("Delete playlist error:", error);
logger.error("Delete playlist error:", error);
res.status(500).json({ error: "Failed to delete playlist" });
}
});
@@ -340,6 +367,7 @@ router.delete("/:id", async (req, res) => {
// POST /playlists/:id/items
router.post("/:id/items", async (req, res) => {
try {
if (!req.user) return res.status(401).json({ error: "Unauthorized" });
const userId = req.user.id;
const parsedBody = addTrackSchema.safeParse(req.body);
if (!parsedBody.success) {
@@ -425,7 +453,7 @@ router.post("/:id/items", async (req, res) => {
.status(400)
.json({ error: "Invalid request", details: error.errors });
}
console.error("Add track to playlist error:", error);
logger.error("Add track to playlist error:", error);
res.status(500).json({ error: "Failed to add track to playlist" });
}
});
@@ -433,7 +461,7 @@ router.post("/:id/items", async (req, res) => {
// DELETE /playlists/:id/items/:trackId
router.delete("/:id/items/:trackId", async (req, res) => {
try {
const userId = req.user.id;
const userId = req.user!.id;
// Check ownership
const playlist = await prisma.playlist.findUnique({
@@ -459,7 +487,7 @@ router.delete("/:id/items/:trackId", async (req, res) => {
res.json({ message: "Track removed from playlist" });
} catch (error) {
console.error("Remove track from playlist error:", error);
logger.error("Remove track from playlist error:", error);
res.status(500).json({ error: "Failed to remove track from playlist" });
}
});
@@ -467,7 +495,7 @@ router.delete("/:id/items/:trackId", async (req, res) => {
// PUT /playlists/:id/items/reorder
router.put("/:id/items/reorder", async (req, res) => {
try {
const userId = req.user.id;
const userId = req.user!.id;
const { trackIds } = req.body; // Array of track IDs in new order
if (!Array.isArray(trackIds)) {
@@ -504,7 +532,7 @@ router.put("/:id/items/reorder", async (req, res) => {
res.json({ message: "Playlist reordered" });
} catch (error) {
console.error("Reorder playlist error:", error);
logger.error("Reorder playlist error:", error);
res.status(500).json({ error: "Failed to reorder playlist" });
}
});
@@ -519,7 +547,7 @@ router.put("/:id/items/reorder", async (req, res) => {
*/
router.get("/:id/pending", async (req, res) => {
try {
const userId = req.user.id;
const userId = req.user!.id;
const playlistId = req.params.id;
// Check ownership or public access
@@ -553,7 +581,7 @@ router.get("/:id/pending", async (req, res) => {
spotifyPlaylistId: playlist.spotifyPlaylistId,
});
} catch (error) {
console.error("Get pending tracks error:", error);
logger.error("Get pending tracks error:", error);
res.status(500).json({ error: "Failed to get pending tracks" });
}
});
@@ -564,7 +592,7 @@ router.get("/:id/pending", async (req, res) => {
*/
router.delete("/:id/pending/:trackId", async (req, res) => {
try {
const userId = req.user.id;
const userId = req.user!.id;
const { id: playlistId, trackId: pendingTrackId } = req.params;
// Check ownership
@@ -589,7 +617,7 @@ router.delete("/:id/pending/:trackId", async (req, res) => {
if (error.code === "P2025") {
return res.status(404).json({ error: "Pending track not found" });
}
console.error("Delete pending track error:", error);
logger.error("Delete pending track error:", error);
res.status(500).json({ error: "Failed to delete pending track" });
}
});
@@ -632,7 +660,7 @@ router.get("/:id/pending/:trackId/preview", async (req, res) => {
res.json({ previewUrl });
} catch (error: any) {
console.error("Get preview URL error:", error);
logger.error("Get preview URL error:", error);
res.status(500).json({ error: "Failed to get preview URL" });
}
});
@@ -644,7 +672,7 @@ router.get("/:id/pending/:trackId/preview", async (req, res) => {
*/
router.post("/:id/pending/:trackId/retry", async (req, res) => {
try {
const userId = req.user.id;
const userId = req.user!.id;
const { id: playlistId, trackId: pendingTrackId } = req.params;
sessionLog(
@@ -771,7 +799,7 @@ router.post("/:id/pending/:trackId/retry", async (req, res) => {
? pendingTrack.spotifyAlbum
: pendingTrack.spotifyArtist; // Use artist as fallback folder name
console.log(
logger.debug(
`[Retry] Starting download for: ${pendingTrack.spotifyArtist} - ${pendingTrack.spotifyTitle}`
);
sessionLog(
@@ -787,7 +815,7 @@ router.post("/:id/pending/:trackId/retry", async (req, res) => {
);
if (!searchResult.found || searchResult.allMatches.length === 0) {
console.log(`[Retry] No results found on Soulseek`);
logger.debug(`[Retry] No results found on Soulseek`);
sessionLog("PENDING-RETRY", `No results found on Soulseek`, "INFO");
await prisma.downloadJob.update({
@@ -806,7 +834,7 @@ router.post("/:id/pending/:trackId/retry", async (req, res) => {
});
}
console.log(
logger.debug(
`[Retry] ✓ Found ${searchResult.allMatches.length} results, starting download in background`
);
sessionLog(
@@ -833,7 +861,7 @@ router.post("/:id/pending/:trackId/retry", async (req, res) => {
)
.then(async (result) => {
if (result.success) {
console.log(
logger.debug(
`[Retry] ✓ Download complete: ${result.filePath}`
);
sessionLog(
@@ -870,7 +898,7 @@ router.post("/:id/pending/:trackId/retry", async (req, res) => {
removeOnComplete: true,
}
);
console.log(
logger.debug(
`[Retry] Queued library scan to reconcile pending tracks`
);
sessionLog(
@@ -880,7 +908,7 @@ router.post("/:id/pending/:trackId/retry", async (req, res) => {
})`
);
} catch (scanError) {
console.error(
logger.error(
`[Retry] Failed to queue scan:`,
scanError
);
@@ -893,7 +921,7 @@ router.post("/:id/pending/:trackId/retry", async (req, res) => {
);
}
} else {
console.log(`[Retry] Download failed: ${result.error}`);
logger.debug(`[Retry] Download failed: ${result.error}`);
sessionLog(
"PENDING-RETRY",
`Download failed: ${result.error || "unknown error"}`,
@@ -911,7 +939,7 @@ router.post("/:id/pending/:trackId/retry", async (req, res) => {
}
})
.catch((error) => {
console.error(`[Retry] Download error:`, error);
logger.error(`[Retry] Download error:`, error);
sessionLog(
"PENDING-RETRY",
`Download exception: ${error?.message || error}`,
@@ -930,7 +958,7 @@ router.post("/:id/pending/:trackId/retry", async (req, res) => {
.catch(() => undefined);
});
} catch (error: any) {
console.error("Retry pending track error:", error);
logger.error("Retry pending track error:", error);
sessionLog(
"PENDING-RETRY",
`Handler error: ${error?.message || error}`,
@@ -949,7 +977,7 @@ router.post("/:id/pending/:trackId/retry", async (req, res) => {
*/
router.post("/:id/pending/reconcile", async (req, res) => {
try {
const userId = req.user.id;
const userId = req.user!.id;
const playlistId = req.params.id;
// Check ownership
@@ -977,7 +1005,7 @@ router.post("/:id/pending/reconcile", async (req, res) => {
playlistsUpdated: result.playlistsUpdated,
});
} catch (error) {
console.error("Reconcile pending tracks error:", error);
logger.error("Reconcile pending tracks error:", error);
res.status(500).json({ error: "Failed to reconcile pending tracks" });
}
});

View File

@@ -1,4 +1,5 @@
import { Router } from "express";
import { logger } from "../utils/logger";
import { requireAuth } from "../middleware/auth";
import { prisma } from "../utils/db";
import { z } from "zod";
@@ -40,7 +41,7 @@ router.post("/", async (req, res) => {
.status(400)
.json({ error: "Invalid request", details: error.errors });
}
console.error("Create play error:", error);
logger.error("Create play error:", error);
res.status(500).json({ error: "Failed to log play" });
}
});
@@ -76,7 +77,7 @@ router.get("/", async (req, res) => {
res.json(plays);
} catch (error) {
console.error("Get plays error:", error);
logger.error("Get plays error:", error);
res.status(500).json({ error: "Failed to get plays" });
}
});

View File

@@ -1,4 +1,5 @@
import { Router } from "express";
import { logger } from "../utils/logger";
import { requireAuth, requireAuthOrToken } from "../middleware/auth";
import { prisma } from "../utils/db";
import { rssParserService } from "../services/rss-parser";
@@ -16,7 +17,7 @@ const router = Router();
router.post("/sync-covers", requireAuth, async (req, res) => {
try {
const { notificationService } = await import("../services/notificationService");
console.log(" Starting podcast cover sync...");
logger.debug(" Starting podcast cover sync...");
const podcastResult = await podcastCacheService.syncAllCovers();
const episodeResult = await podcastCacheService.syncEpisodeCovers();
@@ -25,7 +26,7 @@ router.post("/sync-covers", requireAuth, async (req, res) => {
await notificationService.notifySystem(
req.user!.id,
"Podcast Covers Synced",
`Synced ${podcastResult.cached || 0} podcast covers and ${episodeResult.cached || 0} episode covers`
`Synced ${podcastResult.synced || 0} podcast covers and ${episodeResult.synced || 0} episode covers`
);
res.json({
@@ -34,7 +35,7 @@ router.post("/sync-covers", requireAuth, async (req, res) => {
episodes: episodeResult,
});
} catch (error: any) {
console.error("Podcast cover sync failed:", error);
logger.error("Podcast cover sync failed:", error);
res.status(500).json({
error: "Sync failed",
message: error.message,
@@ -110,7 +111,7 @@ router.get("/", async (req, res) => {
res.json(podcasts);
} catch (error: any) {
console.error("Error fetching podcasts:", error);
logger.error("Error fetching podcasts:", error);
res.status(500).json({
error: "Failed to fetch podcasts",
message: error.message,
@@ -127,7 +128,7 @@ router.get("/discover/top", requireAuthOrToken, async (req, res) => {
const { limit = "20" } = req.query;
const podcastLimit = Math.min(parseInt(limit as string, 10), 50);
console.log(`\n[TOP PODCASTS] Request (limit: ${podcastLimit})`);
logger.debug(`\n[TOP PODCASTS] Request (limit: ${podcastLimit})`);
// Simple iTunes search - same as the working search bar!
const itunesResponse = await axios.get(
@@ -155,10 +156,10 @@ router.get("/discover/top", requireAuthOrToken, async (req, res) => {
isExternal: true,
}));
console.log(` Found ${podcasts.length} podcasts`);
logger.debug(` Found ${podcasts.length} podcasts`);
res.json(podcasts);
} catch (error: any) {
console.error("Error fetching top podcasts:", error);
logger.error("Error fetching top podcasts:", error);
res.status(500).json({
error: "Failed to fetch top podcasts",
message: error.message,
@@ -174,7 +175,7 @@ router.get("/discover/genres", async (req, res) => {
try {
const { genres } = req.query; // Comma-separated genre IDs
console.log(`\n[GENRE PODCASTS] Request (genres: ${genres})`);
logger.debug(`\n[GENRE PODCASTS] Request (genres: ${genres})`);
if (!genres || typeof genres !== "string") {
return res.status(400).json({
@@ -198,7 +199,7 @@ router.get("/discover/genres", async (req, res) => {
// Fetch podcasts for each genre using simple iTunes search - PARALLEL execution
const genreFetchPromises = genreIds.map(async (genreId) => {
const searchTerm = genreSearchTerms[genreId] || "podcast";
console.log(` Searching for "${searchTerm}"...`);
logger.debug(` Searching for "${searchTerm}"...`);
try {
// Simple iTunes search - same as the working search bar!
@@ -230,12 +231,12 @@ router.get("/discover/genres", async (req, res) => {
})
);
console.log(
logger.debug(
` Found ${podcasts.length} podcasts for genre ${genreId}`
);
return { genreId, podcasts };
} catch (error: any) {
console.error(
logger.error(
` Error searching for ${searchTerm}:`,
error.message
);
@@ -252,12 +253,12 @@ router.get("/discover/genres", async (req, res) => {
results[genreId] = podcasts;
}
console.log(
logger.debug(
` Fetched podcasts for ${genreIds.length} genres (parallel)`
);
res.json(results);
} catch (error: any) {
console.error("Error fetching genre podcasts:", error);
logger.error("Error fetching genre podcasts:", error);
res.status(500).json({
error: "Failed to fetch genre podcasts",
message: error.message,
@@ -277,7 +278,7 @@ router.get("/discover/genre/:genreId", async (req, res) => {
const podcastLimit = Math.min(parseInt(limit as string, 10), 50);
const podcastOffset = parseInt(offset as string, 10);
console.log(
logger.debug(
`\n[GENRE PAGINATED] Request (genre: ${genreId}, limit: ${podcastLimit}, offset: ${podcastOffset})`
);
@@ -293,7 +294,7 @@ router.get("/discover/genre/:genreId", async (req, res) => {
};
const searchTerm = genreSearchTerms[genreId] || "podcast";
console.log(
logger.debug(
` Searching for "${searchTerm}" (offset: ${podcastOffset})...`
);
@@ -332,12 +333,12 @@ router.get("/discover/genre/:genreId", async (req, res) => {
podcastOffset + podcastLimit
);
console.log(
logger.debug(
` Found ${podcasts.length} podcasts (total available: ${allPodcasts.length})`
);
res.json(podcasts);
} catch (error: any) {
console.error("Error fetching paginated genre podcasts:", error);
logger.error("Error fetching paginated genre podcasts:", error);
res.status(500).json({
error: "Failed to fetch podcasts",
message: error.message,
@@ -354,7 +355,7 @@ router.get("/preview/:itunesId", async (req, res) => {
try {
const { itunesId } = req.params;
console.log(`\n [PODCAST PREVIEW] iTunes ID: ${itunesId}`);
logger.debug(`\n [PODCAST PREVIEW] iTunes ID: ${itunesId}`);
// Try to fetch from iTunes API
const itunesResponse = await axios.get(
@@ -406,7 +407,7 @@ router.get("/preview/:itunesId", async (req, res) => {
podcastData.feedUrl
);
description =
feedData.description || feedData.itunes?.summary || "";
feedData.podcast.description || "";
// Get first 3 episodes for preview
previewEpisodes = (feedData.episodes || [])
@@ -417,11 +418,11 @@ router.get("/preview/:itunesId", async (req, res) => {
duration: episode.duration || 0,
}));
console.log(
logger.debug(
` [PODCAST PREVIEW] Fetched description (${description.length} chars) and ${previewEpisodes.length} preview episodes`
);
} catch (error) {
console.warn(` Failed to fetch RSS feed for preview:`, error);
logger.warn(` Failed to fetch RSS feed for preview:`, error);
// Continue without description and episodes
}
}
@@ -440,7 +441,7 @@ router.get("/preview/:itunesId", async (req, res) => {
subscribedPodcastId: isSubscribed ? existingPodcast!.id : null,
});
} catch (error: any) {
console.error("Error previewing podcast:", error);
logger.error("Error previewing podcast:", error);
res.status(500).json({
error: "Failed to preview podcast",
message: error.message,
@@ -532,7 +533,7 @@ router.get("/:id", async (req, res) => {
isSubscribed: true,
});
} catch (error: any) {
console.error("Error fetching podcast:", error);
logger.error("Error fetching podcast:", error);
res.status(500).json({
error: "Failed to fetch podcast",
message: error.message,
@@ -554,17 +555,17 @@ router.post("/subscribe", async (req, res) => {
.json({ error: "feedUrl or itunesId is required" });
}
console.log(
logger.debug(
`\n [PODCAST] Subscribe request from ${req.user!.username}`
);
console.log(` Feed URL: ${feedUrl || "N/A"}`);
console.log(` iTunes ID: ${itunesId || "N/A"}`);
logger.debug(` Feed URL: ${feedUrl || "N/A"}`);
logger.debug(` iTunes ID: ${itunesId || "N/A"}`);
let finalFeedUrl = feedUrl;
// If only iTunes ID provided, fetch feed URL from iTunes API
if (!finalFeedUrl && itunesId) {
console.log(` Looking up feed URL from iTunes...`);
logger.debug(` Looking up feed URL from iTunes...`);
const itunesResponse = await axios.get(
"https://itunes.apple.com/lookup",
{
@@ -582,7 +583,7 @@ router.post("/subscribe", async (req, res) => {
}
finalFeedUrl = itunesResponse.data.results[0].feedUrl;
console.log(` Found feed URL: ${finalFeedUrl}`);
logger.debug(` Found feed URL: ${finalFeedUrl}`);
}
// Check if podcast already exists in database
@@ -591,7 +592,7 @@ router.post("/subscribe", async (req, res) => {
});
if (podcast) {
console.log(` Podcast exists in database: ${podcast.title}`);
logger.debug(` Podcast exists in database: ${podcast.title}`);
// Check if user is already subscribed
const existingSubscription =
@@ -605,7 +606,7 @@ router.post("/subscribe", async (req, res) => {
});
if (existingSubscription) {
console.log(` User already subscribed`);
logger.debug(` User already subscribed`);
return res.json({
success: true,
podcast: {
@@ -624,7 +625,7 @@ router.post("/subscribe", async (req, res) => {
},
});
console.log(` User subscribed to existing podcast`);
logger.debug(` User subscribed to existing podcast`);
return res.json({
success: true,
podcast: {
@@ -636,14 +637,14 @@ router.post("/subscribe", async (req, res) => {
}
// Parse RSS feed to get podcast and episodes
console.log(` Parsing RSS feed...`);
logger.debug(` Parsing RSS feed...`);
const { podcast: podcastData, episodes } =
await rssParserService.parseFeed(finalFeedUrl);
// Create podcast in database
console.log(` Saving podcast to database...`);
logger.debug(` Saving podcast to database...`);
const finalItunesId = itunesId || podcastData.itunesId;
console.log(` iTunes ID to save: ${finalItunesId || "NONE"}`);
logger.debug(` iTunes ID to save: ${finalItunesId || "NONE"}`);
podcast = await prisma.podcast.create({
data: {
@@ -659,11 +660,11 @@ router.post("/subscribe", async (req, res) => {
},
});
console.log(` Podcast created: ${podcast.id}`);
console.log(` iTunes ID saved: ${podcast.itunesId || "NONE"}`);
logger.debug(` Podcast created: ${podcast.id}`);
logger.debug(` iTunes ID saved: ${podcast.itunesId || "NONE"}`);
// Save episodes
console.log(` Saving ${episodes.length} episodes...`);
logger.debug(` Saving ${episodes.length} episodes...`);
await prisma.podcastEpisode.createMany({
data: episodes.map((ep) => ({
podcastId: podcast!.id,
@@ -682,7 +683,7 @@ router.post("/subscribe", async (req, res) => {
skipDuplicates: true,
});
console.log(` Episodes saved`);
logger.debug(` Episodes saved`);
// Subscribe user
await prisma.podcastSubscription.create({
@@ -692,7 +693,7 @@ router.post("/subscribe", async (req, res) => {
},
});
console.log(` User subscribed successfully`);
logger.debug(` User subscribed successfully`);
res.json({
success: true,
@@ -703,7 +704,7 @@ router.post("/subscribe", async (req, res) => {
message: "Subscribed successfully",
});
} catch (error: any) {
console.error("Error subscribing to podcast:", error);
logger.error("Error subscribing to podcast:", error);
res.status(500).json({
error: "Failed to subscribe to podcast",
message: error.message,
@@ -719,9 +720,9 @@ router.delete("/:id/unsubscribe", async (req, res) => {
try {
const { id } = req.params;
console.log(`\n[PODCAST] Unsubscribe request`);
console.log(` User: ${req.user!.username}`);
console.log(` Podcast ID: ${id}`);
logger.debug(`\n[PODCAST] Unsubscribe request`);
logger.debug(` User: ${req.user!.username}`);
logger.debug(` Podcast ID: ${id}`);
// Delete subscription
const deleted = await prisma.podcastSubscription.deleteMany({
@@ -757,14 +758,14 @@ router.delete("/:id/unsubscribe", async (req, res) => {
},
});
console.log(` Unsubscribed successfully`);
logger.debug(` Unsubscribed successfully`);
res.json({
success: true,
message: "Unsubscribed successfully",
});
} catch (error: any) {
console.error("Error unsubscribing from podcast:", error);
logger.error("Error unsubscribing from podcast:", error);
res.status(500).json({
error: "Failed to unsubscribe",
message: error.message,
@@ -780,8 +781,8 @@ router.get("/:id/refresh", async (req, res) => {
try {
const { id } = req.params;
console.log(`\n [PODCAST] Refresh request`);
console.log(` Podcast ID: ${id}`);
logger.debug(`\n [PODCAST] Refresh request`);
logger.debug(` Podcast ID: ${id}`);
const podcast = await prisma.podcast.findUnique({
where: { id },
@@ -792,7 +793,7 @@ router.get("/:id/refresh", async (req, res) => {
}
// Parse RSS feed
console.log(` Parsing RSS feed...`);
logger.debug(` Parsing RSS feed...`);
const { podcast: podcastData, episodes } =
await rssParserService.parseFeed(podcast.feedUrl);
@@ -844,7 +845,7 @@ router.get("/:id/refresh", async (req, res) => {
}
}
console.log(
logger.debug(
` Refresh complete. ${newEpisodesCount} new episodes added.`
);
@@ -855,7 +856,7 @@ router.get("/:id/refresh", async (req, res) => {
message: `Found ${newEpisodesCount} new episodes`,
});
} catch (error: any) {
console.error("Error refreshing podcast:", error);
logger.error("Error refreshing podcast:", error);
res.status(500).json({
error: "Failed to refresh podcast",
message: error.message,
@@ -888,7 +889,7 @@ router.get("/:podcastId/episodes/:episodeId/cache-status", async (req, res) => {
path: cachedPath ? true : false, // Don't expose actual path
});
} catch (error: any) {
console.error("[PODCAST] Cache status check failed:", error);
logger.error("[PODCAST] Cache status check failed:", error);
res.status(500).json({ error: "Failed to check cache status" });
}
});
@@ -904,12 +905,12 @@ router.get("/:podcastId/episodes/:episodeId/stream", async (req, res) => {
const userId = req.user?.id;
const podcastDebug = process.env.PODCAST_DEBUG === "1";
console.log(`\n [PODCAST STREAM] Request:`);
console.log(` Podcast ID: ${podcastId}`);
console.log(` Episode ID: ${episodeId}`);
logger.debug(`\n [PODCAST STREAM] Request:`);
logger.debug(` Podcast ID: ${podcastId}`);
logger.debug(` Episode ID: ${episodeId}`);
if (podcastDebug) {
console.log(` Range: ${req.headers.range || "none"}`);
console.log(` UA: ${req.headers["user-agent"] || "unknown"}`);
logger.debug(` Range: ${req.headers.range || "none"}`);
logger.debug(` UA: ${req.headers["user-agent"] || "unknown"}`);
}
const episode = await prisma.podcastEpisode.findUnique({
@@ -921,10 +922,10 @@ router.get("/:podcastId/episodes/:episodeId/stream", async (req, res) => {
}
if (podcastDebug) {
console.log(` Episode DB: title="${episode.title}"`);
console.log(` Episode DB: guid="${episode.guid}"`);
console.log(` Episode DB: audioUrl="${episode.audioUrl}"`);
console.log(` Episode DB: mimeType="${episode.mimeType || "unknown"}" fileSize=${episode.fileSize || 0}`);
logger.debug(` Episode DB: title="${episode.title}"`);
logger.debug(` Episode DB: guid="${episode.guid}"`);
logger.debug(` Episode DB: audioUrl="${episode.audioUrl}"`);
logger.debug(` Episode DB: mimeType="${episode.mimeType || "unknown"}" fileSize=${episode.fileSize || 0}`);
}
const range = req.headers.range;
@@ -937,12 +938,12 @@ router.get("/:podcastId/episodes/:episodeId/stream", async (req, res) => {
const cachedPath = await getCachedFilePath(episodeId);
if (cachedPath) {
console.log(` Streaming from cache: ${cachedPath}`);
logger.debug(` Streaming from cache: ${cachedPath}`);
try {
const stats = await fs.promises.stat(cachedPath);
const fileSize = stats.size;
if (podcastDebug) {
console.log(` Cache file size: ${fileSize}`);
logger.debug(` Cache file size: ${fileSize}`);
}
if (fileSize === 0) {
@@ -958,7 +959,7 @@ router.get("/:podcastId/episodes/:episodeId/stream", async (req, res) => {
// Validate range bounds
if (start >= fileSize) {
console.log(
logger.debug(
` Range start ${start} >= file size ${fileSize}, clamping to EOF`
);
// Browsers can occasionally request a range start beyond EOF during media seeking.
@@ -987,7 +988,7 @@ router.get("/:podcastId/episodes/:episodeId/stream", async (req, res) => {
});
fileStream.pipe(res);
fileStream.on("error", (err) => {
console.error(" Cache stream error:", err);
logger.error(" Cache stream error:", err);
if (!res.headersSent) {
res.status(500).json({
error: "Failed to stream episode",
@@ -1002,7 +1003,7 @@ router.get("/:podcastId/episodes/:episodeId/stream", async (req, res) => {
const validEnd = Math.min(end, fileSize - 1);
const chunkSize = validEnd - start + 1;
console.log(
logger.debug(
` Serving range: bytes ${start}-${validEnd}/${fileSize}`
);
@@ -1029,7 +1030,7 @@ router.get("/:podcastId/episodes/:episodeId/stream", async (req, res) => {
});
fileStream.pipe(res);
fileStream.on("error", (err) => {
console.error(" Cache stream error:", err);
logger.error(" Cache stream error:", err);
if (!res.headersSent) {
res.status(500).json({
error: "Failed to stream episode",
@@ -1042,7 +1043,7 @@ router.get("/:podcastId/episodes/:episodeId/stream", async (req, res) => {
}
// No range - serve entire file
console.log(` Serving full file: ${fileSize} bytes`);
logger.debug(` Serving full file: ${fileSize} bytes`);
res.writeHead(200, {
"Content-Type": episode.mimeType || "audio/mpeg",
"Content-Length": fileSize,
@@ -1061,7 +1062,7 @@ router.get("/:podcastId/episodes/:episodeId/stream", async (req, res) => {
});
fileStream.pipe(res);
fileStream.on("error", (err) => {
console.error(" Cache stream error:", err);
logger.error(" Cache stream error:", err);
if (!res.headersSent) {
res.status(500).json({
error: "Failed to stream episode",
@@ -1072,7 +1073,7 @@ router.get("/:podcastId/episodes/:episodeId/stream", async (req, res) => {
});
return; // CRITICAL: Exit after starting cache stream
} catch (err: any) {
console.error(
logger.error(
" Failed to stream from cache, falling back to RSS:",
err.message
);
@@ -1082,12 +1083,12 @@ router.get("/:podcastId/episodes/:episodeId/stream", async (req, res) => {
// Not cached yet - trigger background download while streaming from RSS
if (userId && !isDownloading(episodeId)) {
console.log(` Triggering background download for caching`);
logger.debug(` Triggering background download for caching`);
downloadInBackground(episodeId, episode.audioUrl, userId);
}
// Stream from RSS URL
console.log(` Streaming from RSS: ${episode.audioUrl}`);
logger.debug(` Streaming from RSS: ${episode.audioUrl}`);
// Get file size first for proper range handling
let fileSize = episode.fileSize;
@@ -1104,7 +1105,7 @@ router.get("/:podcastId/episodes/:episodeId/stream", async (req, res) => {
});
}
} catch (err) {
console.warn(" Could not get file size via HEAD request");
logger.warn(" Could not get file size via HEAD request");
}
}
@@ -1115,7 +1116,7 @@ router.get("/:podcastId/episodes/:episodeId/stream", async (req, res) => {
const end = parts[1] ? parseInt(parts[1], 10) : fileSize - 1;
const chunkSize = end - start + 1;
console.log(` Range request: bytes=${start}-${end}/${fileSize}`);
logger.debug(` Range request: bytes=${start}-${end}/${fileSize}`);
try {
// Try range request first
@@ -1149,7 +1150,7 @@ router.get("/:podcastId/episodes/:episodeId/stream", async (req, res) => {
} catch (rangeError: any) {
// 416 = Range Not Satisfiable - many podcast CDNs don't support range requests
// Fall back to streaming the full file and let the browser handle seeking
console.log(
logger.debug(
` Range request failed (${
rangeError.response?.status || rangeError.message
}), falling back to full stream`
@@ -1183,7 +1184,7 @@ router.get("/:podcastId/episodes/:episodeId/stream", async (req, res) => {
}
} else {
// No range request - stream entire file
console.log(` Streaming full file`);
logger.debug(` Streaming full file`);
const response = await axios.get(episode.audioUrl, {
responseType: "stream",
@@ -1209,7 +1210,7 @@ router.get("/:podcastId/episodes/:episodeId/stream", async (req, res) => {
response.data.pipe(res);
}
} catch (error: any) {
console.error("\n [PODCAST STREAM] Error:", error.message);
logger.error("\n [PODCAST STREAM] Error:", error.message);
if (!res.headersSent) {
res.status(500).json({
error: "Failed to stream episode",
@@ -1228,12 +1229,12 @@ router.post("/:podcastId/episodes/:episodeId/progress", async (req, res) => {
const { podcastId, episodeId } = req.params;
const { currentTime, duration, isFinished } = req.body;
console.log(`\n [PODCAST PROGRESS] Update:`);
console.log(` User: ${req.user!.username}`);
console.log(` Episode ID: ${episodeId}`);
console.log(` Current Time: ${currentTime}s`);
console.log(` Duration: ${duration}s`);
console.log(` Finished: ${isFinished}`);
logger.debug(`\n [PODCAST PROGRESS] Update:`);
logger.debug(` User: ${req.user!.username}`);
logger.debug(` Episode ID: ${episodeId}`);
logger.debug(` Current Time: ${currentTime}s`);
logger.debug(` Duration: ${duration}s`);
logger.debug(` Finished: ${isFinished}`);
const progress = await prisma.podcastProgress.upsert({
where: {
@@ -1257,7 +1258,7 @@ router.post("/:podcastId/episodes/:episodeId/progress", async (req, res) => {
},
});
console.log(` Progress saved`);
logger.debug(` Progress saved`);
res.json({
success: true,
@@ -1271,7 +1272,7 @@ router.post("/:podcastId/episodes/:episodeId/progress", async (req, res) => {
},
});
} catch (error: any) {
console.error("Error updating progress:", error);
logger.error("Error updating progress:", error);
res.status(500).json({
error: "Failed to update progress",
message: error.message,
@@ -1287,9 +1288,9 @@ router.delete("/:podcastId/episodes/:episodeId/progress", async (req, res) => {
try {
const { episodeId } = req.params;
console.log(`\n[PODCAST PROGRESS] Delete:`);
console.log(` User: ${req.user!.username}`);
console.log(` Episode ID: ${episodeId}`);
logger.debug(`\n[PODCAST PROGRESS] Delete:`);
logger.debug(` User: ${req.user!.username}`);
logger.debug(` Episode ID: ${episodeId}`);
await prisma.podcastProgress.deleteMany({
where: {
@@ -1298,14 +1299,14 @@ router.delete("/:podcastId/episodes/:episodeId/progress", async (req, res) => {
},
});
console.log(` Progress removed`);
logger.debug(` Progress removed`);
res.json({
success: true,
message: "Progress removed",
});
} catch (error: any) {
console.error("Error removing progress:", error);
logger.error("Error removing progress:", error);
res.status(500).json({
error: "Failed to remove progress",
message: error.message,
@@ -1329,7 +1330,7 @@ router.get("/:id/similar", async (req, res) => {
return res.status(404).json({ error: "Podcast not found" });
}
console.log(`\n [SIMILAR PODCASTS] Request for: ${podcast.title}`);
logger.debug(`\n [SIMILAR PODCASTS] Request for: ${podcast.title}`);
try {
// Check cache first
@@ -1344,7 +1345,7 @@ router.get("/:id/similar", async (req, res) => {
});
if (cachedRecommendations.length > 0) {
console.log(
logger.debug(
` Using ${cachedRecommendations.length} cached recommendations`
);
return res.json(
@@ -1364,15 +1365,15 @@ router.get("/:id/similar", async (req, res) => {
}
// Fetch from iTunes Search API
console.log(` Fetching from iTunes Search API...`);
logger.debug(` Fetching from iTunes Search API...`);
const { itunesService } = await import("../services/itunes");
const recommendations = await itunesService.getSimilarPodcasts(
podcast.title,
podcast.description || undefined,
podcast.author
podcast.description ?? undefined,
podcast.author ?? undefined
);
console.log(` Found ${recommendations.length} similar podcasts`);
logger.debug(` Found ${recommendations.length} similar podcasts`);
if (recommendations.length > 0) {
// Cache recommendations
@@ -1400,7 +1401,7 @@ router.get("/:id/similar", async (req, res) => {
})),
});
console.log(
logger.debug(
` Cached ${recommendations.length} recommendations`
);
@@ -1420,14 +1421,14 @@ router.get("/:id/similar", async (req, res) => {
);
}
} catch (error: any) {
console.warn(" iTunes search failed:", error.message);
logger.warn(" iTunes search failed:", error.message);
}
// No recommendations available
console.log(` No recommendations found`);
logger.debug(` No recommendations found`);
res.json([]);
} catch (error: any) {
console.error("Error fetching similar podcasts:", error);
logger.error("Error fetching similar podcasts:", error);
res.status(500).json({
error: "Failed to fetch similar podcasts",
message: error.message,
@@ -1488,7 +1489,7 @@ router.get("/:id/cover", async (req, res) => {
res.status(404).json({ error: "Cover not found" });
} catch (error: any) {
console.error("Error serving podcast cover:", error);
logger.error("Error serving podcast cover:", error);
res.status(500).json({
error: "Failed to serve cover",
message: error.message,
@@ -1549,7 +1550,7 @@ router.get("/episodes/:episodeId/cover", async (req, res) => {
res.status(404).json({ error: "Cover not found" });
} catch (error: any) {
console.error("Error serving episode cover:", error);
logger.error("Error serving episode cover:", error);
res.status(500).json({
error: "Failed to serve cover",
message: error.message,

View File

@@ -1,4 +1,5 @@
import { Router } from "express";
import { logger } from "../utils/logger";
import { requireAuth, requireAuthOrToken } from "../middleware/auth";
import { prisma } from "../utils/db";
import { lastFmService } from "../services/lastfm";
@@ -93,7 +94,7 @@ router.get("/for-you", async (req, res) => {
});
const ownedArtistIds = new Set(ownedArtists.map((a) => a.artistId));
console.log(
logger.debug(
`Filtering recommendations: ${ownedArtistIds.size} owned artists to exclude`
);
@@ -158,11 +159,11 @@ router.get("/for-you", async (req, res) => {
};
});
console.log(
logger.debug(
`Recommendations: Found ${artistsWithMetadata.length} new artists`
);
artistsWithMetadata.forEach((a) => {
console.log(
logger.debug(
` ${a.name}: coverArt=${a.coverArt ? "YES" : "NO"}, albums=${
a.albumCount
}`
@@ -171,7 +172,7 @@ router.get("/for-you", async (req, res) => {
res.json({ artists: artistsWithMetadata });
} catch (error) {
console.error("Get recommendations for you error:", error);
logger.error("Get recommendations for you error:", error);
res.status(500).json({ error: "Failed to get recommendations" });
}
});
@@ -244,7 +245,7 @@ router.get("/", async (req, res) => {
recommendations,
});
} catch (error) {
console.error("Get recommendations error:", error);
logger.error("Get recommendations error:", error);
res.status(500).json({ error: "Failed to get recommendations" });
}
});
@@ -363,7 +364,7 @@ router.get("/albums", async (req, res) => {
recommendations,
});
} catch (error) {
console.error("Get album recommendations error:", error);
logger.error("Get album recommendations error:", error);
res.status(500).json({
error: "Failed to get album recommendations",
});
@@ -459,7 +460,7 @@ router.get("/tracks", async (req, res) => {
recommendations,
});
} catch (error) {
console.error("Get track recommendations error:", error);
logger.error("Get track recommendations error:", error);
res.status(500).json({
error: "Failed to get track recommendations",
});

View File

@@ -1,6 +1,8 @@
import { logger } from "../utils/logger";
/**
* Release Radar API
*
*
* Provides upcoming and recent releases from:
* 1. Lidarr monitored artists (via calendar API)
* 2. Similar artists from user's library (Last.fm similar artists)
@@ -52,7 +54,7 @@ router.get("/radar", async (req, res) => {
const endDate = new Date(now);
endDate.setDate(endDate.getDate() + daysAhead);
console.log(`[Releases] Fetching radar: ${daysBack} days back, ${daysAhead} days ahead`);
logger.debug(`[Releases] Fetching radar: ${daysBack} days back, ${daysAhead} days ahead`);
// 1. Get releases from Lidarr calendar (monitored artists)
const lidarrReleases = await lidarrService.getCalendar(startDate, endDate);
@@ -92,8 +94,8 @@ router.get("/radar", async (req, res) => {
sa => sa.toArtist.mbid && !monitoredMbids.has(sa.toArtist.mbid)
);
console.log(`[Releases] Found ${lidarrReleases.length} Lidarr releases`);
console.log(`[Releases] Found ${unmonitoredSimilar.length} unmonitored similar artists`);
logger.debug(`[Releases] Found ${lidarrReleases.length} Lidarr releases`);
logger.debug(`[Releases] Found ${unmonitoredSimilar.length} unmonitored similar artists`);
// 4. Get albums in library to check what user already has
const libraryAlbums = await prisma.album.findMany({
@@ -142,7 +144,7 @@ router.get("/radar", async (req, res) => {
res.json(response);
} catch (error: any) {
console.error("[Releases] Radar error:", error.message);
logger.error("[Releases] Radar error:", error.message);
res.status(500).json({ error: "Failed to fetch release radar" });
}
});
@@ -173,7 +175,7 @@ router.get("/upcoming", async (req, res) => {
daysAhead,
});
} catch (error: any) {
console.error("[Releases] Upcoming error:", error.message);
logger.error("[Releases] Upcoming error:", error.message);
res.status(500).json({ error: "Failed to fetch upcoming releases" });
}
});
@@ -195,7 +197,6 @@ router.get("/recent", async (req, res) => {
// Get library albums to mark what's already downloaded
const libraryAlbums = await prisma.album.findMany({
where: { rgMbid: { not: null } },
select: { rgMbid: true }
});
const libraryMbids = new Set(libraryAlbums.map(a => a.rgMbid).filter(Boolean));
@@ -214,7 +215,7 @@ router.get("/recent", async (req, res) => {
inLibraryCount: releases.length - notInLibrary.length,
});
} catch (error: any) {
console.error("[Releases] Recent error:", error.message);
logger.error("[Releases] Recent error:", error.message);
res.status(500).json({ error: "Failed to fetch recent releases" });
}
});
@@ -233,24 +234,15 @@ router.post("/download/:albumMbid", async (req, res) => {
return res.status(401).json({ error: "Authentication required" });
}
console.log(`[Releases] Download requested for album: ${albumMbid}`);
logger.debug(`[Releases] Download requested for album: ${albumMbid}`);
// Use Lidarr to download the album
const result = await lidarrService.downloadAlbum(albumMbid);
if (result) {
res.json({
success: true,
message: "Download started",
albumId: result.id
});
} else {
res.status(404).json({
error: "Album not found in Lidarr or download failed"
});
}
// TODO: Implement downloadAlbum method on LidarrService
// For now, return not implemented error
res.status(501).json({
error: "Download feature not yet implemented for release radar"
});
} catch (error: any) {
console.error("[Releases] Download error:", error.message);
logger.error("[Releases] Download error:", error.message);
res.status(500).json({ error: "Failed to start download" });
}
});

View File

@@ -1,4 +1,5 @@
import { Router } from "express";
import { logger } from "../utils/logger";
import { requireAuth } from "../middleware/auth";
import { prisma } from "../utils/db";
import { audiobookshelfService } from "../services/audiobookshelf";
@@ -33,7 +34,7 @@ router.use(requireAuth);
* name: type
* schema:
* type: string
* enum: [all, artists, albums, tracks, audiobooks, podcasts]
* enum: [all, artists, albums, tracks, audiobooks, podcasts, episodes]
* description: Type of content to search
* default: all
* - in: query
@@ -102,11 +103,13 @@ router.get("/", async (req, res) => {
}
// Check cache for library search (short TTL since library can change)
const cacheKey = `search:library:${type}:${genre || ""}:${query}:${searchLimit}`;
const cacheKey = `search:library:${type}:${
genre || ""
}:${query}:${searchLimit}`;
try {
const cached = await redisClient.get(cacheKey);
if (cached) {
console.log(`[SEARCH] Cache hit for query="${query}"`);
logger.debug(`[SEARCH] Cache hit for query="${query}"`);
return res.json(JSON.parse(cached));
}
} catch (err) {
@@ -119,6 +122,7 @@ router.get("/", async (req, res) => {
tracks: [],
audiobooks: [],
podcasts: [],
episodes: [],
};
// Search artists using full-text search (only show artists with actual albums in library)
@@ -246,41 +250,48 @@ router.get("/", async (req, res) => {
}
}
// Search audiobooks
// Search audiobooks using FTS
if (type === "all" || type === "audiobooks") {
try {
const audiobooks = await audiobookshelfService.searchAudiobooks(
query
);
results.audiobooks = audiobooks.slice(0, searchLimit);
const audiobooks = await searchService.searchAudiobooksFTS({
query,
limit: searchLimit,
});
results.audiobooks = audiobooks;
} catch (error) {
console.error("Audiobook search error:", error);
logger.error("Audiobook search error:", error);
results.audiobooks = [];
}
}
// Search podcasts (search through owned podcasts)
// Search podcasts using FTS
if (type === "all" || type === "podcasts") {
try {
const allPodcasts =
await audiobookshelfService.getAllPodcasts();
results.podcasts = allPodcasts
.filter(
(p) =>
p.media?.metadata?.title
?.toLowerCase()
.includes(query.toLowerCase()) ||
p.media?.metadata?.author
?.toLowerCase()
.includes(query.toLowerCase())
)
.slice(0, searchLimit);
const podcasts = await searchService.searchPodcastsFTS({
query,
limit: searchLimit,
});
results.podcasts = podcasts;
} catch (error) {
console.error("Podcast search error:", error);
logger.error("Podcast search error:", error);
results.podcasts = [];
}
}
// Search podcast episodes
if (type === "all" || type === "episodes") {
try {
const episodes = await searchService.searchEpisodes({
query,
limit: searchLimit,
});
results.episodes = episodes;
} catch (error) {
logger.error("Episode search error:", error);
results.episodes = [];
}
}
// Cache search results for 2 minutes (library can change)
try {
await redisClient.setEx(cacheKey, 120, JSON.stringify(results));
@@ -290,7 +301,7 @@ router.get("/", async (req, res) => {
res.json(results);
} catch (error) {
console.error("Search error:", error);
logger.error("Search error:", error);
res.status(500).json({ error: "Search failed" });
}
});
@@ -315,7 +326,7 @@ router.get("/genres", async (req, res) => {
}))
);
} catch (error) {
console.error("Get genres error:", error);
logger.error("Get genres error:", error);
res.status(500).json({ error: "Failed to get genres" });
}
});
@@ -339,13 +350,13 @@ router.get("/discover", async (req, res) => {
try {
const cached = await redisClient.get(cacheKey);
if (cached) {
console.log(
logger.debug(
`[SEARCH DISCOVER] Cache hit for query="${query}" type=${type}`
);
return res.json(JSON.parse(cached));
}
} catch (err) {
console.warn("[SEARCH DISCOVER] Redis read error:", err);
logger.warn("[SEARCH DISCOVER] Redis read error:", err);
}
const results: any[] = [];
@@ -353,27 +364,56 @@ router.get("/discover", async (req, res) => {
if (type === "music" || type === "all") {
// Search Last.fm for artists AND tracks
try {
// Search for artists
// Check if query is a potential alias
let searchQuery = query;
let aliasInfo: any = null;
try {
const correction = await lastFmService.getArtistCorrection(query);
if (correction?.corrected) {
// Query is an alias - search for canonical name instead
searchQuery = correction.canonicalName;
aliasInfo = {
type: "alias_resolution",
original: query,
canonical: correction.canonicalName,
mbid: correction.mbid,
};
logger.debug(
`[SEARCH DISCOVER] Alias resolved: "${query}" → "${correction.canonicalName}"`
);
}
} catch (correctionError) {
logger.warn("[SEARCH DISCOVER] Correction check failed:", correctionError);
}
// Search for artists (using potentially corrected query)
const lastfmArtistResults = await lastFmService.searchArtists(
query,
searchQuery,
searchLimit
);
console.log(
logger.debug(
`[SEARCH ENDPOINT] Found ${lastfmArtistResults.length} artist results`
);
// Add alias info to response if applicable
if (aliasInfo) {
results.push(aliasInfo);
}
results.push(...lastfmArtistResults);
// Search for tracks (songs)
// Search for tracks (songs) - use corrected query for consistency
const lastfmTrackResults = await lastFmService.searchTracks(
query,
searchQuery,
searchLimit
);
console.log(
logger.debug(
`[SEARCH ENDPOINT] Found ${lastfmTrackResults.length} track results`
);
results.push(...lastfmTrackResults);
} catch (error) {
console.error("Last.fm search error:", error);
logger.error("Last.fm search error:", error);
}
}
@@ -410,7 +450,7 @@ router.get("/discover", async (req, res) => {
results.push(...podcasts);
} catch (error) {
console.error("iTunes podcast search error:", error);
logger.error("iTunes podcast search error:", error);
}
}
@@ -419,12 +459,12 @@ router.get("/discover", async (req, res) => {
try {
await redisClient.setEx(cacheKey, 900, JSON.stringify(payload));
} catch (err) {
console.warn("[SEARCH DISCOVER] Redis write error:", err);
logger.warn("[SEARCH DISCOVER] Redis write error:", err);
}
res.json(payload);
} catch (error) {
console.error("Discovery search error:", error);
logger.error("Discovery search error:", error);
res.status(500).json({ error: "Discovery search failed" });
}
});

View File

@@ -1,7 +1,9 @@
import { Router } from "express";
import { logger } from "../utils/logger";
import { requireAuth } from "../middleware/auth";
import { prisma } from "../utils/db";
import { z } from "zod";
import { staleJobCleanupService } from "../services/staleJobCleanup";
const router = Router();
@@ -38,7 +40,7 @@ router.get("/", async (req, res) => {
res.json(settings);
} catch (error) {
console.error("Get settings error:", error);
logger.error("Get settings error:", error);
res.status(500).json({ error: "Failed to get settings" });
}
});
@@ -65,9 +67,30 @@ router.post("/", async (req, res) => {
.status(400)
.json({ error: "Invalid settings", details: error.errors });
}
console.error("Update settings error:", error);
logger.error("Update settings error:", error);
res.status(500).json({ error: "Failed to update settings" });
}
});
// POST /settings/cleanup-stale-jobs
router.post("/cleanup-stale-jobs", async (req, res) => {
try {
const result = await staleJobCleanupService.cleanupAll();
res.json({
success: true,
cleaned: {
discoveryBatches: result.discoveryBatches,
downloadJobs: result.downloadJobs,
spotifyImportJobs: result.spotifyImportJobs,
bullQueues: result.bullQueues,
},
totalCleaned: result.totalCleaned,
});
} catch (error) {
logger.error("Stale job cleanup error:", error);
res.status(500).json({ error: "Failed to cleanup stale jobs" });
}
});
export default router;

View File

@@ -1,3 +1,5 @@
import { logger } from "../utils/logger";
/**
* Soulseek routes - Direct connection via slsk-client
* Simplified API for status and manual search/download
@@ -23,7 +25,7 @@ async function requireSoulseekConfigured(req: any, res: any, next: any) {
next();
} catch (error) {
console.error("Error checking Soulseek settings:", error);
logger.error("Error checking Soulseek settings:", error);
res.status(500).json({ error: "Failed to check settings" });
}
}
@@ -52,7 +54,7 @@ router.get("/status", requireAuth, async (req, res) => {
username: status.username,
});
} catch (error: any) {
console.error("Soulseek status error:", error.message);
logger.error("Soulseek status error:", error.message);
res.status(500).json({
error: "Failed to get Soulseek status",
details: error.message,
@@ -73,7 +75,7 @@ router.post("/connect", requireAuth, requireSoulseekConfigured, async (req, res)
message: "Connected to Soulseek network",
});
} catch (error: any) {
console.error("Soulseek connect error:", error.message);
logger.error("Soulseek connect error:", error.message);
res.status(500).json({
error: "Failed to connect to Soulseek",
details: error.message,
@@ -95,7 +97,7 @@ router.post("/search", requireAuth, requireSoulseekConfigured, async (req, res)
});
}
console.log(`[Soulseek] Searching: "${artist} - ${title}"`);
logger.debug(`[Soulseek] Searching: "${artist} - ${title}"`);
const result = await soulseekService.searchTrack(artist, title);
@@ -117,7 +119,7 @@ router.post("/search", requireAuth, requireSoulseekConfigured, async (req, res)
});
}
} catch (error: any) {
console.error("Soulseek search error:", error.message);
logger.error("Soulseek search error:", error.message);
res.status(500).json({
error: "Search failed",
details: error.message,
@@ -148,7 +150,7 @@ router.post("/download", requireAuth, requireSoulseekConfigured, async (req, res
});
}
console.log(`[Soulseek] Downloading: "${artist} - ${title}"`);
logger.debug(`[Soulseek] Downloading: "${artist} - ${title}"`);
const result = await soulseekService.searchAndDownload(
artist,
@@ -169,7 +171,7 @@ router.post("/download", requireAuth, requireSoulseekConfigured, async (req, res
});
}
} catch (error: any) {
console.error("Soulseek download error:", error.message);
logger.error("Soulseek download error:", error.message);
res.status(500).json({
error: "Download failed",
details: error.message,

View File

@@ -1,4 +1,5 @@
import { Router } from "express";
import { logger } from "../utils/logger";
import { requireAuthOrToken } from "../middleware/auth";
import { z } from "zod";
import { spotifyService } from "../services/spotify";
@@ -51,7 +52,7 @@ router.post("/parse", async (req, res) => {
url: `https://open.spotify.com/playlist/${parsed.id}`,
});
} catch (error: any) {
console.error("Spotify parse error:", error);
logger.error("Spotify parse error:", error);
if (error.name === "ZodError") {
return res.status(400).json({ error: "Invalid request body" });
}
@@ -67,7 +68,7 @@ router.post("/preview", async (req, res) => {
try {
const { url } = parseUrlSchema.parse(req.body);
console.log(`[Playlist Import] Generating preview for: ${url}`);
logger.debug(`[Playlist Import] Generating preview for: ${url}`);
// Detect if it's a Deezer URL
if (url.includes("deezer.com")) {
@@ -94,7 +95,7 @@ router.post("/preview", async (req, res) => {
deezerPlaylist
);
console.log(
logger.debug(
`[Playlist Import] Deezer preview generated: ${preview.summary.total} tracks, ${preview.summary.inLibrary} in library`
);
res.json(preview);
@@ -102,13 +103,13 @@ router.post("/preview", async (req, res) => {
// Handle Spotify URL
const preview = await spotifyImportService.generatePreview(url);
console.log(
logger.debug(
`[Spotify Import] Preview generated: ${preview.summary.total} tracks, ${preview.summary.inLibrary} in library`
);
res.json(preview);
}
} catch (error: any) {
console.error("Playlist preview error:", error);
logger.error("Playlist preview error:", error);
if (error.name === "ZodError") {
return res.status(400).json({ error: "Invalid request body" });
}
@@ -124,6 +125,9 @@ router.post("/preview", async (req, res) => {
*/
router.post("/import", async (req, res) => {
try {
if (!req.user) {
return res.status(401).json({ error: "Unauthorized" });
}
const { spotifyPlaylistId, url, playlistName, albumMbidsToDownload } =
importSchema.parse(req.body);
const userId = req.user.id;
@@ -155,10 +159,10 @@ router.post("/import", async (req, res) => {
preview = await spotifyImportService.generatePreview(effectiveUrl);
}
console.log(
logger.debug(
`[Spotify Import] Starting import for user ${userId}: ${playlistName}`
);
console.log(
logger.debug(
`[Spotify Import] Downloading ${albumMbidsToDownload.length} albums`
);
@@ -176,7 +180,7 @@ router.post("/import", async (req, res) => {
message: "Import started",
});
} catch (error: any) {
console.error("Spotify import error:", error);
logger.error("Spotify import error:", error);
if (error.name === "ZodError") {
return res.status(400).json({ error: "Invalid request body" });
}
@@ -192,6 +196,9 @@ router.post("/import", async (req, res) => {
*/
router.get("/import/:jobId/status", async (req, res) => {
try {
if (!req.user) {
return res.status(401).json({ error: "Unauthorized" });
}
const { jobId } = req.params;
const userId = req.user.id;
@@ -209,7 +216,7 @@ router.get("/import/:jobId/status", async (req, res) => {
res.json(job);
} catch (error: any) {
console.error("Spotify job status error:", error);
logger.error("Spotify job status error:", error);
res.status(500).json({
error: error.message || "Failed to get job status",
});
@@ -222,11 +229,14 @@ router.get("/import/:jobId/status", async (req, res) => {
*/
router.get("/imports", async (req, res) => {
try {
if (!req.user) {
return res.status(401).json({ error: "Unauthorized" });
}
const userId = req.user.id;
const jobs = await spotifyImportService.getUserJobs(userId);
res.json(jobs);
} catch (error: any) {
console.error("Spotify imports error:", error);
logger.error("Spotify imports error:", error);
res.status(500).json({
error: error.message || "Failed to get imports",
});
@@ -240,6 +250,7 @@ router.get("/imports", async (req, res) => {
router.post("/import/:jobId/refresh", async (req, res) => {
try {
const { jobId } = req.params;
if (!req.user) return res.status(401).json({ error: "Unauthorized" });
const userId = req.user.id;
const job = await spotifyImportService.getJob(jobId);
@@ -265,7 +276,7 @@ router.post("/import/:jobId/refresh", async (req, res) => {
total: result.total,
});
} catch (error: any) {
console.error("Spotify refresh error:", error);
logger.error("Spotify refresh error:", error);
res.status(500).json({
error: error.message || "Failed to refresh tracks",
});
@@ -279,7 +290,7 @@ router.post("/import/:jobId/refresh", async (req, res) => {
router.post("/import/:jobId/cancel", async (req, res) => {
try {
const { jobId } = req.params;
const userId = req.user.id;
const userId = req.user!.id;
const job = await spotifyImportService.getJob(jobId);
if (!job) {
@@ -303,7 +314,7 @@ router.post("/import/:jobId/cancel", async (req, res) => {
tracksMatched: result.tracksMatched,
});
} catch (error: any) {
console.error("Spotify cancel error:", error);
logger.error("Spotify cancel error:", error);
res.status(500).json({
error: error.message || "Failed to cancel import",
});
@@ -324,7 +335,7 @@ router.get("/import/session-log", async (req, res) => {
content: log,
});
} catch (error: any) {
console.error("Session log error:", error);
logger.error("Session log error:", error);
res.status(500).json({
error: error.message || "Failed to read session log",
});

View File

@@ -1,4 +1,5 @@
import { Router } from "express";
import { logger } from "../utils/logger";
import { requireAuth, requireAdmin } from "../middleware/auth";
import { prisma } from "../utils/db";
import { z } from "zod";
@@ -17,7 +18,7 @@ function safeDecrypt(value: string | null): string | null {
try {
return decrypt(value);
} catch (error) {
console.warn("[Settings Route] Failed to decrypt field, returning null");
logger.warn("[Settings Route] Failed to decrypt field, returning null");
return null;
}
}
@@ -31,6 +32,7 @@ const systemSettingsSchema = z.object({
lidarrEnabled: z.boolean().optional(),
lidarrUrl: z.string().optional(),
lidarrApiKey: z.string().nullable().optional(),
lidarrWebhookSecret: z.string().nullable().optional(),
// AI Services
openaiEnabled: z.boolean().optional(),
@@ -41,6 +43,8 @@ const systemSettingsSchema = z.object({
fanartEnabled: z.boolean().optional(),
fanartApiKey: z.string().nullable().optional(),
lastfmApiKey: z.string().nullable().optional(),
// Media Services
audiobookshelfEnabled: z.boolean().optional(),
audiobookshelfUrl: z.string().optional(),
@@ -66,10 +70,11 @@ const systemSettingsSchema = z.object({
maxConcurrentDownloads: z.number().optional(),
downloadRetryAttempts: z.number().optional(),
transcodeCacheMaxGb: z.number().optional(),
soulseekConcurrentDownloads: z.number().min(1).max(10).optional(),
// Download Preferences
downloadSource: z.enum(["soulseek", "lidarr"]).optional(),
soulseekFallback: z.enum(["none", "lidarr"]).optional(),
primaryFailureFallback: z.enum(["none", "lidarr", "soulseek"]).optional(),
});
// GET /system-settings
@@ -107,8 +112,10 @@ router.get("/", async (req, res) => {
const decryptedSettings = {
...settings,
lidarrApiKey: safeDecrypt(settings.lidarrApiKey),
lidarrWebhookSecret: safeDecrypt(settings.lidarrWebhookSecret),
openaiApiKey: safeDecrypt(settings.openaiApiKey),
fanartApiKey: safeDecrypt(settings.fanartApiKey),
lastfmApiKey: safeDecrypt(settings.lastfmApiKey),
audiobookshelfApiKey: safeDecrypt(settings.audiobookshelfApiKey),
soulseekPassword: safeDecrypt(settings.soulseekPassword),
spotifyClientSecret: safeDecrypt(settings.spotifyClientSecret),
@@ -116,7 +123,7 @@ router.get("/", async (req, res) => {
res.json(decryptedSettings);
} catch (error) {
console.error("Get system settings error:", error);
logger.error("Get system settings error:", error);
res.status(500).json({ error: "Failed to get system settings" });
}
});
@@ -126,8 +133,8 @@ router.post("/", async (req, res) => {
try {
const data = systemSettingsSchema.parse(req.body);
console.log("[SYSTEM SETTINGS] Saving settings...");
console.log(
logger.debug("[SYSTEM SETTINGS] Saving settings...");
logger.debug(
"[SYSTEM SETTINGS] transcodeCacheMaxGb:",
data.transcodeCacheMaxGb
);
@@ -137,10 +144,14 @@ router.post("/", async (req, res) => {
if (data.lidarrApiKey)
encryptedData.lidarrApiKey = encrypt(data.lidarrApiKey);
if (data.lidarrWebhookSecret)
encryptedData.lidarrWebhookSecret = encrypt(data.lidarrWebhookSecret);
if (data.openaiApiKey)
encryptedData.openaiApiKey = encrypt(data.openaiApiKey);
if (data.fanartApiKey)
encryptedData.fanartApiKey = encrypt(data.fanartApiKey);
if (data.lastfmApiKey)
encryptedData.lastfmApiKey = encrypt(data.lastfmApiKey);
if (data.audiobookshelfApiKey)
encryptedData.audiobookshelfApiKey = encrypt(
data.audiobookshelfApiKey
@@ -161,19 +172,27 @@ router.post("/", async (req, res) => {
invalidateSystemSettingsCache();
// Refresh Last.fm API key if it was updated
try {
const { lastFmService } = await import("../services/lastfm");
await lastFmService.refreshApiKey();
} catch (err) {
logger.warn("Failed to refresh Last.fm API key:", err);
}
// If Audiobookshelf was disabled, clear all audiobook-related data
if (data.audiobookshelfEnabled === false) {
console.log(
logger.debug(
"[CLEANUP] Audiobookshelf disabled - clearing all audiobook data from database"
);
try {
const deletedProgress =
await prisma.audiobookProgress.deleteMany({});
console.log(
logger.debug(
` Deleted ${deletedProgress.count} audiobook progress entries`
);
} catch (clearError) {
console.error("Failed to clear audiobook data:", clearError);
logger.error("Failed to clear audiobook data:", clearError);
// Don't fail the request
}
}
@@ -191,28 +210,28 @@ router.post("/", async (req, res) => {
SOULSEEK_USERNAME: data.soulseekUsername || null,
SOULSEEK_PASSWORD: data.soulseekPassword || null,
});
console.log(".env file synchronized with database settings");
logger.debug(".env file synchronized with database settings");
} catch (envError) {
console.error("Failed to write .env file:", envError);
logger.error("Failed to write .env file:", envError);
// Don't fail the request if .env write fails
}
// Auto-configure Lidarr webhook if Lidarr is enabled
if (data.lidarrEnabled && data.lidarrUrl && data.lidarrApiKey) {
try {
console.log("[LIDARR] Auto-configuring webhook...");
logger.debug("[LIDARR] Auto-configuring webhook...");
const axios = (await import("axios")).default;
const lidarrUrl = data.lidarrUrl;
const apiKey = data.lidarrApiKey;
// Determine webhook URL
// Use LIDIFY_CALLBACK_URL env var if set, otherwise default to host.docker.internal:3030
// Port 3030 is the external Nginx port that Lidarr can reach
const callbackHost = process.env.LIDIFY_CALLBACK_URL || "http://host.docker.internal:3030";
// Use LIDIFY_CALLBACK_URL env var if set, otherwise default to backend:3006
// In Docker, services communicate via Docker network names (backend, lidarr, etc.)
const callbackHost = process.env.LIDIFY_CALLBACK_URL || "http://backend:3006";
const webhookUrl = `${callbackHost}/api/webhooks/lidarr`;
console.log(` Webhook URL: ${webhookUrl}`);
logger.debug(` Webhook URL: ${webhookUrl}`);
// Check if webhook already exists - find by name "Lidify" OR by URL containing "lidify" or "webhooks/lidarr"
const notificationsResponse = await axios.get(
@@ -241,10 +260,10 @@ router.post("/", async (req, res) => {
if (existingWebhook) {
const currentUrl = existingWebhook.fields?.find((f: any) => f.name === "url")?.value;
console.log(` Found existing webhook: "${existingWebhook.name}" with URL: ${currentUrl}`);
logger.debug(` Found existing webhook: "${existingWebhook.name}" with URL: ${currentUrl}`);
if (currentUrl !== webhookUrl) {
console.log(` URL needs updating from: ${currentUrl}`);
console.log(` URL will be updated to: ${webhookUrl}`);
logger.debug(` URL needs updating from: ${currentUrl}`);
logger.debug(` URL will be updated to: ${webhookUrl}`);
}
}
@@ -293,7 +312,7 @@ router.post("/", async (req, res) => {
timeout: 10000,
}
);
console.log(" Webhook updated");
logger.debug(" Webhook updated");
} else {
// Create new webhook (use forceSave to skip test)
await axios.post(
@@ -304,22 +323,22 @@ router.post("/", async (req, res) => {
timeout: 10000,
}
);
console.log(" Webhook created");
logger.debug(" Webhook created");
}
console.log("Lidarr webhook configured automatically\n");
logger.debug("Lidarr webhook configured automatically\n");
} catch (webhookError: any) {
console.error(
logger.error(
"Failed to auto-configure webhook:",
webhookError.message
);
if (webhookError.response?.data) {
console.error(
logger.error(
" Lidarr error details:",
JSON.stringify(webhookError.response.data, null, 2)
);
}
console.log(
logger.debug(
" User can configure webhook manually in Lidarr UI\n"
);
// Don't fail the request if webhook config fails
@@ -338,7 +357,7 @@ router.post("/", async (req, res) => {
.status(400)
.json({ error: "Invalid settings", details: error.errors });
}
console.error("Update system settings error:", error);
logger.error("Update system settings error:", error);
res.status(500).json({ error: "Failed to update system settings" });
}
});
@@ -348,7 +367,7 @@ router.post("/test-lidarr", async (req, res) => {
try {
const { url, apiKey } = req.body;
console.log("[Lidarr Test] Testing connection to:", url);
logger.debug("[Lidarr Test] Testing connection to:", url);
if (!url || !apiKey) {
return res
@@ -368,7 +387,7 @@ router.post("/test-lidarr", async (req, res) => {
}
);
console.log(
logger.debug(
"[Lidarr Test] Connection successful, version:",
response.data.version
);
@@ -379,8 +398,8 @@ router.post("/test-lidarr", async (req, res) => {
version: response.data.version,
});
} catch (error: any) {
console.error("[Lidarr Test] Error:", error.message);
console.error(
logger.error("[Lidarr Test] Error:", error.message);
logger.error(
"[Lidarr Test] Details:",
error.response?.data || error.code
);
@@ -433,7 +452,7 @@ router.post("/test-openai", async (req, res) => {
model: response.data.model,
});
} catch (error: any) {
console.error("OpenAI test error:", error.message);
logger.error("OpenAI test error:", error.message);
res.status(500).json({
error: "Failed to connect to OpenAI",
details: error.response?.data?.error?.message || error.message,
@@ -469,7 +488,7 @@ router.post("/test-fanart", async (req, res) => {
message: "Fanart.tv connection successful",
});
} catch (error: any) {
console.error("Fanart.tv test error:", error.message);
logger.error("Fanart.tv test error:", error.message);
if (error.response?.status === 401) {
res.status(401).json({
error: "Invalid Fanart.tv API key",
@@ -483,6 +502,59 @@ router.post("/test-fanart", async (req, res) => {
}
});
// Test Last.fm connection
router.post("/test-lastfm", async (req, res) => {
try {
const { lastfmApiKey } = req.body;
if (!lastfmApiKey) {
return res.status(400).json({ error: "API key is required" });
}
const axios = require("axios");
// Test with a known artist (The Beatles)
const testArtist = "The Beatles";
const response = await axios.get(
"http://ws.audioscrobbler.com/2.0/",
{
params: {
method: "artist.getinfo",
artist: testArtist,
api_key: lastfmApiKey,
format: "json",
},
timeout: 5000,
}
);
// If we get here and have artist data, the API key is valid
if (response.data.artist) {
res.json({
success: true,
message: "Last.fm connection successful",
});
} else {
res.status(500).json({
error: "Unexpected response from Last.fm",
});
}
} catch (error: any) {
logger.error("Last.fm test error:", error.message);
if (error.response?.status === 403 || error.response?.data?.error === 10) {
res.status(401).json({
error: "Invalid Last.fm API key",
});
} else {
res.status(500).json({
error: "Failed to connect to Last.fm",
details: error.response?.data || error.message,
});
}
}
});
// Test Audiobookshelf connection
router.post("/test-audiobookshelf", async (req, res) => {
try {
@@ -509,7 +581,7 @@ router.post("/test-audiobookshelf", async (req, res) => {
libraries: response.data.libraries?.length || 0,
});
} catch (error: any) {
console.error("Audiobookshelf test error:", error.message);
logger.error("Audiobookshelf test error:", error.message);
if (error.response?.status === 401 || error.response?.status === 403) {
res.status(401).json({
error: "Invalid Audiobookshelf API key",
@@ -534,7 +606,7 @@ router.post("/test-soulseek", async (req, res) => {
});
}
console.log(`[SOULSEEK-TEST] Testing connection as "${username}"...`);
logger.debug(`[SOULSEEK-TEST] Testing connection as "${username}"...`);
// Import soulseek service
const { soulseekService } = await import("../services/soulseek");
@@ -550,10 +622,10 @@ router.post("/test-soulseek", async (req, res) => {
{ user: username, pass: password },
(err: Error | null, client: any) => {
if (err) {
console.log(`[SOULSEEK-TEST] Connection failed: ${err.message}`);
logger.debug(`[SOULSEEK-TEST] Connection failed: ${err.message}`);
return reject(err);
}
console.log(`[SOULSEEK-TEST] Connected successfully`);
logger.debug(`[SOULSEEK-TEST] Connected successfully`);
// We don't need to keep the connection open for the test
resolve();
}
@@ -567,14 +639,14 @@ router.post("/test-soulseek", async (req, res) => {
isConnected: true,
});
} catch (connectError: any) {
console.error(`[SOULSEEK-TEST] Error: ${connectError.message}`);
logger.error(`[SOULSEEK-TEST] Error: ${connectError.message}`);
res.status(401).json({
error: "Invalid Soulseek credentials or connection failed",
details: connectError.message,
});
}
} catch (error: any) {
console.error("[SOULSEEK-TEST] Error:", error.message);
logger.error("[SOULSEEK-TEST] Error:", error.message);
res.status(500).json({
error: "Failed to test Soulseek connection",
details: error.message,
@@ -593,22 +665,39 @@ router.post("/test-spotify", async (req, res) => {
});
}
// Import spotifyService to test credentials
const { spotifyService } = await import("../services/spotify");
const result = await spotifyService.testCredentials(clientId, clientSecret);
// Test credentials by trying to get an access token
const axios = require("axios");
try {
const response = await axios.post(
"https://accounts.spotify.com/api/token",
"grant_type=client_credentials",
{
headers: {
"Content-Type": "application/x-www-form-urlencoded",
Authorization: `Basic ${Buffer.from(`${clientId}:${clientSecret}`).toString("base64")}`,
},
timeout: 10000,
}
);
if (result.success) {
res.json({
success: true,
message: "Spotify credentials are valid",
});
} else {
if (response.data.access_token) {
res.json({
success: true,
message: "Spotify credentials are valid",
});
} else {
res.status(401).json({
error: "Invalid Spotify credentials",
});
}
} catch (tokenError: any) {
res.status(401).json({
error: result.error || "Invalid Spotify credentials",
error: "Invalid Spotify credentials",
details: tokenError.response?.data?.error_description || tokenError.message,
});
}
} catch (error: any) {
console.error("Spotify test error:", error.message);
logger.error("Spotify test error:", error.message);
res.status(500).json({
error: "Failed to test Spotify credentials",
details: error.message,
@@ -661,7 +750,7 @@ router.post("/clear-caches", async (req, res) => {
);
if (keysToDelete.length > 0) {
console.log(
logger.debug(
`[CACHE] Clearing ${
keysToDelete.length
} cache entries (excluding ${
@@ -671,7 +760,7 @@ router.post("/clear-caches", async (req, res) => {
for (const key of keysToDelete) {
await redisClient.del(key);
}
console.log(
logger.debug(
`[CACHE] Successfully cleared ${keysToDelete.length} cache entries`
);
@@ -701,7 +790,7 @@ router.post("/clear-caches", async (req, res) => {
});
}
} catch (error: any) {
console.error("Clear caches error:", error);
logger.error("Clear caches error:", error);
res.status(500).json({
error: "Failed to clear caches",
details: error.message,

View File

@@ -6,15 +6,26 @@
*/
import { Router } from "express";
import { prisma } from "../utils/db";
import { scanQueue } from "../workers/queues";
import { discoverWeeklyService } from "../services/discoverWeekly";
import { simpleDownloadManager } from "../services/simpleDownloadManager";
import { queueCleaner } from "../jobs/queueCleaner";
import { getSystemSettings } from "../utils/systemSettings";
import { prisma } from "../utils/db";
import { logger } from "../utils/logger";
const router = Router();
// GET /webhooks/lidarr/verify - Webhook verification endpoint
router.get("/lidarr/verify", (req, res) => {
logger.debug("[WEBHOOK] Verification request received");
res.json({
status: "ok",
timestamp: new Date().toISOString(),
service: "lidify",
version: process.env.npm_package_version || "unknown",
});
});
// POST /webhooks/lidarr - Handle Lidarr webhooks
router.post("/lidarr", async (req, res) => {
try {
@@ -25,7 +36,7 @@ router.post("/lidarr", async (req, res) => {
!settings?.lidarrUrl ||
!settings?.lidarrApiKey
) {
console.log(
logger.debug(
`[WEBHOOK] Lidarr webhook received but Lidarr is disabled. Ignoring.`
);
return res.status(202).json({
@@ -35,12 +46,27 @@ router.post("/lidarr", async (req, res) => {
});
}
// Verify webhook secret if configured
// Note: settings.lidarrWebhookSecret is already decrypted by getSystemSettings()
if (settings.lidarrWebhookSecret) {
const providedSecret = req.headers["x-webhook-secret"] as string;
if (!providedSecret || providedSecret !== settings.lidarrWebhookSecret) {
logger.debug(
`[WEBHOOK] Lidarr webhook received with invalid or missing secret`
);
return res.status(401).json({
error: "Unauthorized - Invalid webhook secret",
});
}
}
const eventType = req.body.eventType;
console.log(`[WEBHOOK] Lidarr event: ${eventType}`);
logger.debug(`[WEBHOOK] Lidarr event: ${eventType}`);
// Log payload in debug mode only (avoid verbose logs in production)
if (process.env.DEBUG_WEBHOOKS === "true") {
console.log(` Payload:`, JSON.stringify(req.body, null, 2));
logger.debug(` Payload:`, JSON.stringify(req.body, null, 2));
}
switch (eventType) {
@@ -68,16 +94,16 @@ router.post("/lidarr", async (req, res) => {
break;
case "Test":
console.log(" Lidarr test webhook received");
logger.debug(" Lidarr test webhook received");
break;
default:
console.log(` Unhandled event: ${eventType}`);
logger.debug(` Unhandled event: ${eventType}`);
}
res.json({ success: true });
} catch (error: any) {
console.error("Webhook error:", error.message);
logger.error("Webhook error:", error.message);
res.status(500).json({ error: "Webhook processing failed" });
}
});
@@ -93,12 +119,12 @@ async function handleGrab(payload: any) {
const artistName = payload.artist?.name;
const lidarrAlbumId = payload.albums?.[0]?.id;
console.log(` Album: ${artistName} - ${albumTitle}`);
console.log(` Download ID: ${downloadId}`);
console.log(` MBID: ${albumMbid}`);
logger.debug(` Album: ${artistName} - ${albumTitle}`);
logger.debug(` Download ID: ${downloadId}`);
logger.debug(` MBID: ${albumMbid}`);
if (!downloadId) {
console.log(` Missing downloadId, skipping`);
logger.debug(` Missing downloadId, skipping`);
return;
}
@@ -128,13 +154,13 @@ async function handleDownload(payload: any) {
payload.album?.foreignAlbumId || payload.albums?.[0]?.foreignAlbumId;
const lidarrAlbumId = payload.album?.id || payload.albums?.[0]?.id;
console.log(` Album: ${artistName} - ${albumTitle}`);
console.log(` Download ID: ${downloadId}`);
console.log(` Album MBID: ${albumMbid}`);
console.log(` Lidarr Album ID: ${lidarrAlbumId}`);
logger.debug(` Album: ${artistName} - ${albumTitle}`);
logger.debug(` Download ID: ${downloadId}`);
logger.debug(` Album MBID: ${albumMbid}`);
logger.debug(` Lidarr Album ID: ${lidarrAlbumId}`);
if (!downloadId) {
console.log(` Missing downloadId, skipping`);
logger.debug(` Missing downloadId, skipping`);
return;
}
@@ -148,36 +174,30 @@ async function handleDownload(payload: any) {
);
if (result.jobId) {
// Check if this is part of a download batch (artist download)
if (result.downloadBatchId) {
// Check if all jobs in the batch are complete
const batchComplete = await checkDownloadBatchComplete(
result.downloadBatchId
);
if (batchComplete) {
console.log(
` All albums in batch complete, triggering library scan...`
);
await scanQueue.add("scan", {
type: "full",
source: "lidarr-import-batch",
});
} else {
console.log(` Batch not complete, skipping scan`);
}
} else if (!result.batchId) {
// Single album download (not part of discovery batch)
console.log(` Triggering library scan...`);
await scanQueue.add("scan", {
type: "full",
source: "lidarr-import",
});
}
// If part of discovery batch, the download manager already called checkBatchCompletion
// Find the download job that triggered this webhook to get userId
const downloadJob = await prisma.downloadJob.findUnique({
where: { id: result.jobId },
select: { userId: true, id: true },
});
// Trigger scan immediately for this album (incremental scan with enrichment data)
// Don't wait for batch completion - enrichment should happen per-album
logger.debug(
` Triggering incremental scan for: ${artistName} - ${albumTitle}`
);
await scanQueue.add("scan", {
userId: downloadJob?.userId || null,
source: "lidarr-webhook",
artistName: artistName,
albumMbid: albumMbid,
downloadId: result.jobId,
});
// Discovery batch completion (for playlist building) is handled by download manager
} else {
// No job found - this might be an external download not initiated by us
// Still trigger a scan to pick up the new music
console.log(` No matching job, triggering scan anyway...`);
logger.debug(` No matching job, triggering scan anyway...`);
await scanQueue.add("scan", {
type: "full",
source: "lidarr-import-external",
@@ -185,26 +205,6 @@ async function handleDownload(payload: any) {
}
}
/**
* Check if all jobs in a download batch are complete
*/
async function checkDownloadBatchComplete(batchId: string): Promise<boolean> {
const pendingJobs = await prisma.downloadJob.count({
where: {
metadata: {
path: ["batchId"],
equals: batchId,
},
status: { in: ["pending", "processing"] },
},
});
console.log(
` Batch ${batchId}: ${pendingJobs} pending/processing jobs remaining`
);
return pendingJobs === 0;
}
/**
* Handle import failure with automatic retry
*/
@@ -215,12 +215,12 @@ async function handleImportFailure(payload: any) {
const albumTitle = payload.album?.title || payload.release?.title;
const reason = payload.message || "Import failed";
console.log(` Album: ${albumTitle}`);
console.log(` Download ID: ${downloadId}`);
console.log(` Reason: ${reason}`);
logger.debug(` Album: ${albumTitle}`);
logger.debug(` Download ID: ${downloadId}`);
logger.debug(` Reason: ${reason}`);
if (!downloadId) {
console.log(` Missing downloadId, skipping`);
logger.debug(` Missing downloadId, skipping`);
return;
}

View File

@@ -0,0 +1,850 @@
/**
* Unified Acquisition Service
*
* Consolidates album/track acquisition logic from Discovery Weekly and Playlist Import.
* Handles download source selection, behavior matrix routing, and job tracking.
*
* Phase 2.1: Initial implementation
* - Behavior matrix logic for primary/fallback source selection
* - Soulseek album acquisition (track list → batch download)
* - Lidarr album acquisition (webhook-based completion)
* - DownloadJob management with context-based tracking
*/
import { logger } from "../utils/logger";
import { prisma } from "../utils/db";
import { getSystemSettings } from "../utils/systemSettings";
import { soulseekService } from "./soulseek";
import { simpleDownloadManager } from "./simpleDownloadManager";
import { musicBrainzService } from "./musicbrainz";
import { lastFmService } from "./lastfm";
import { AcquisitionError, AcquisitionErrorType } from "./lidarr";
import PQueue from "p-queue";
// ============================================
// TYPE DEFINITIONS
// ============================================
/**
* Context for tracking acquisition origin
* Used to link download jobs to their source (Discovery batch or Spotify import)
*/
export interface AcquisitionContext {
  userId: string; // owner of any DownloadJob created for this acquisition
  discoveryBatchId?: string; // present when triggered by a Discovery batch
  spotifyImportJobId?: string; // present when triggered by a Spotify playlist import
  existingJobId?: string; // reuse this DownloadJob instead of creating a new one
}
/**
 * Request to acquire an album
 */
export interface AlbumAcquisitionRequest {
  albumTitle: string;
  artistName: string;
  mbid?: string; // MusicBrainz album id; required by both album download paths
  lastfmUrl?: string;
  requestedTracks?: Array<{ title: string; position?: number }>; // optional subset of tracks instead of the full album
}
/**
 * Request to acquire individual tracks (for Unknown Album case)
 */
export interface TrackAcquisitionRequest {
  trackTitle: string;
  artistName: string;
  albumTitle?: string; // defaults to "Unknown Album" when downloading
}
/**
 * Result of an acquisition attempt
 */
export interface AcquisitionResult {
  success: boolean;
  downloadJobId?: number;
  source?: "soulseek" | "lidarr"; // source that ultimately handled the request
  error?: string;
  errorType?: AcquisitionErrorType;
  isRecoverable?: boolean; // whether a retry or fallback might succeed
  tracksDownloaded?: number;
  tracksTotal?: number;
  correlationId?: string; // Lidarr correlation id (webhook matching)
}
/**
 * Service availability check result
 */
interface ServiceAvailability {
  lidarrAvailable: boolean;
  soulseekAvailable: boolean;
}
/**
 * Download behavior matrix configuration
 * (primary/fallback source selection computed from settings + availability)
 */
interface DownloadBehavior {
  hasPrimarySource: boolean;
  primarySource: "soulseek" | "lidarr" | null;
  hasFallbackSource: boolean;
  fallbackSource: "soulseek" | "lidarr" | null;
}
// ============================================
// ACQUISITION SERVICE
// ============================================
class AcquisitionService {
// Work queue that bounds how many albums are acquired concurrently.
private albumQueue: PQueue;
constructor() {
  // Initialize album queue with concurrency of 2 (configurable)
  // so several albums can be acquired in parallel without flooding sources.
  this.albumQueue = new PQueue({ concurrency: 2 });
  logger.debug(
    "[Acquisition] Initialized album queue with concurrency=2"
  );
}
/**
 * Get download behavior configuration (settings + service availability).
 * Auto-detects and selects download source based on actual availability:
 * - neither source available: no primary, no fallback
 * - exactly one available: it becomes primary regardless of user preference
 * - both available: user preference is primary; fallback honored or
 *   auto-enabled when no explicit fallback setting exists
 *
 * @returns Resolved DownloadBehavior matrix
 */
private async getDownloadBehavior(): Promise<DownloadBehavior> {
  const settings = await getSystemSettings();
  // Get download source settings
  const downloadSource = settings?.downloadSource || "soulseek";
  const primaryFailureFallback = settings?.primaryFailureFallback || "none";
  // Determine actual availability
  const hasSoulseek = await soulseekService.isAvailable();
  const hasLidarr = !!(
    settings?.lidarrEnabled &&
    settings?.lidarrUrl &&
    settings?.lidarrApiKey
  );
  const logAvailability = () =>
    logger.debug(
      `[Acquisition] Available sources: Lidarr=${hasLidarr}, Soulseek=${hasSoulseek}`
    );
  // Case 1: No sources available
  if (!hasSoulseek && !hasLidarr) {
    logAvailability();
    logger.error("[Acquisition] No download sources configured");
    return {
      hasPrimarySource: false,
      primarySource: null,
      hasFallbackSource: false,
      fallbackSource: null,
    };
  }
  // Case 2: Only one source available - use it regardless of preference
  // (collapses the two copy-pasted single-source branches)
  if (hasSoulseek !== hasLidarr) {
    const only: "soulseek" | "lidarr" = hasSoulseek ? "soulseek" : "lidarr";
    const label = only === "soulseek" ? "Soulseek" : "Lidarr";
    logAvailability();
    logger.debug(
      `[Acquisition] Using ${label} as primary source (only source available)`
    );
    logger.debug(
      "[Acquisition] No fallback configured (only one source available)"
    );
    return {
      hasPrimarySource: true,
      primarySource: only,
      hasFallbackSource: false,
      fallbackSource: null,
    };
  }
  // Case 3: Both available - respect user preference for primary
  const userPrimary = downloadSource; // "soulseek" or "lidarr"
  const alternative = userPrimary === "soulseek" ? "lidarr" : "soulseek";
  // FIX: dropped redundant `!== "none"` clause — equality with `alternative`
  // ("soulseek"/"lidarr") already excludes "none".
  let useFallback = primaryFailureFallback === alternative;
  // Auto-fallback: If both sources available and no explicit fallback set, enable it
  if (!useFallback && primaryFailureFallback === "none") {
    useFallback = true;
    logger.debug(
      `[Acquisition] Auto-enabled fallback: ${alternative} (both sources configured)`
    );
  }
  logAvailability();
  logger.debug(
    `[Acquisition] Using ${userPrimary} as primary source (user preference)`
  );
  logger.debug(
    `[Acquisition] Fallback configured: ${useFallback ? alternative : "none"}`
  );
  return {
    hasPrimarySource: true,
    primarySource: userPrimary,
    hasFallbackSource: useFallback,
    fallbackSource: useFallback ? alternative : null,
  };
}
/**
 * Update a download job with source-specific status text ("Lidarr #2",
 * "Soulseek #1", ...). Stored in metadata for frontend display, together
 * with per-source attempt counters.
 *
 * @param jobId - DownloadJob id
 * @param source - Source currently attempting the download
 * @param attemptNumber - 1-based attempt count for that source
 */
private async updateJobStatusText(
  jobId: string,
  source: "lidarr" | "soulseek",
  attemptNumber: number
): Promise<void> {
  const sourceLabel = source.charAt(0).toUpperCase() + source.slice(1);
  const statusText = `${sourceLabel} #${attemptNumber}`;
  // Read the latest metadata so fields written elsewhere are preserved.
  const job = await prisma.downloadJob.findUnique({
    where: { id: jobId },
    select: { metadata: true },
  });
  const prior = (job?.metadata as any) || {};
  const lidarrAttempts =
    source === "lidarr" ? attemptNumber : prior.lidarrAttempts || 0;
  const soulseekAttempts =
    source === "soulseek" ? attemptNumber : prior.soulseekAttempts || 0;
  await prisma.downloadJob.update({
    where: { id: jobId },
    data: {
      metadata: {
        ...prior,
        currentSource: source,
        lidarrAttempts,
        soulseekAttempts,
        statusText,
      },
    },
  });
  logger.debug(`[Acquisition] Updated job ${jobId}: ${statusText}`);
}
/**
 * Acquire an album using the configured behavior matrix.
 * Routes to Soulseek or Lidarr based on settings, with fallback support.
 * The work is funneled through the album queue so multiple albums can be
 * acquired in parallel up to the configured concurrency.
 *
 * @param request - Album to acquire
 * @param context - Tracking context (userId, batchId, etc.)
 * @returns Acquisition result
 */
async acquireAlbum(
  request: AlbumAcquisitionRequest,
  context: AcquisitionContext
): Promise<AcquisitionResult> {
  const task = () => this.acquireAlbumInternal(request, context);
  return this.albumQueue.add(task);
}
/**
 * Internal album acquisition logic (called via queue).
 * Resolves the artist name, computes the behavior matrix, runs the primary
 * source, and falls back to the alternative source when configured.
 *
 * @param request - Album to acquire (artistName may be corrected in-flight)
 * @param context - Tracking context
 * @returns Result from the primary source, or the fallback if it ran
 */
private async acquireAlbumInternal(
  request: AlbumAcquisitionRequest,
  context: AcquisitionContext
): Promise<AcquisitionResult> {
  logger.debug(
    `\n[Acquisition] Acquiring album: ${request.artistName} - ${request.albumTitle} (queue: ${this.albumQueue.size} pending, ${this.albumQueue.pending} active)`
  );
  // Normalize the artist name (alias resolution) before touching any source.
  try {
    const correction = await lastFmService.getArtistCorrection(
      request.artistName
    );
    if (correction?.corrected) {
      logger.debug(
        `[Acquisition] Artist corrected: "${request.artistName}" → "${correction.canonicalName}"`
      );
      request = { ...request, artistName: correction.canonicalName };
    }
  } catch (error) {
    logger.warn(
      `[Acquisition] Artist correction failed for "${request.artistName}":`,
      error
    );
  }
  const behavior = await this.getDownloadBehavior();
  if (!behavior.hasPrimarySource) {
    const error =
      "No download sources available (neither Soulseek nor Lidarr configured)";
    logger.error(`[Acquisition] ${error}`);
    return { success: false, error };
  }
  if (
    behavior.primarySource !== "soulseek" &&
    behavior.primarySource !== "lidarr"
  ) {
    // Should be unreachable given the validation above; kept for type safety.
    const error = "No primary source configured";
    logger.error(`[Acquisition] ${error}`);
    return { success: false, error };
  }
  // Dispatch table: display label + runner per source.
  const runners = {
    soulseek: {
      label: "Soulseek",
      run: (req: AlbumAcquisitionRequest) =>
        this.acquireAlbumViaSoulseek(req, context),
    },
    lidarr: {
      label: "Lidarr",
      run: (req: AlbumAcquisitionRequest) =>
        this.acquireAlbumViaLidarr(req, context),
    },
  } as const;
  const primary = runners[behavior.primarySource];
  logger.debug(`[Acquisition] Trying primary: ${primary.label}`);
  let result = await primary.run(request);
  if (!result.success) {
    logger.debug(
      `[Acquisition] ${primary.label} failed: ${result.error || "unknown error"}`
    );
    logger.debug(
      `[Acquisition] Fallback available: hasFallback=${behavior.hasFallbackSource}, source=${behavior.fallbackSource}`
    );
    const other = behavior.primarySource === "soulseek" ? "lidarr" : "soulseek";
    const fallback = runners[other];
    if (behavior.hasFallbackSource && behavior.fallbackSource === other) {
      logger.debug(`[Acquisition] Attempting ${fallback.label} fallback...`);
      result = await fallback.run(request);
    } else {
      logger.debug(
        `[Acquisition] No fallback configured or fallback not ${fallback.label}`
      );
    }
  }
  return result;
}
/**
* Acquire individual tracks via Soulseek (for Unknown Album case)
* Batch downloads tracks without album MBID
*
* @param requests - Tracks to acquire
* @param context - Tracking context
* @returns Array of acquisition results
*/
async acquireTracks(
requests: TrackAcquisitionRequest[],
context: AcquisitionContext
): Promise<AcquisitionResult[]> {
logger.debug(
`\n[Acquisition] Acquiring ${requests.length} individual tracks via Soulseek`
);
// Check Soulseek availability
const soulseekAvailable = await soulseekService.isAvailable();
if (!soulseekAvailable) {
logger.error(
`[Acquisition] Soulseek not available for track downloads`
);
return requests.map(() => ({
success: false,
error: "Soulseek not configured",
}));
}
// Get music path
const settings = await getSystemSettings();
const musicPath = settings?.musicPath;
if (!musicPath) {
logger.error(`[Acquisition] Music path not configured`);
return requests.map(() => ({
success: false,
error: "Music path not configured",
}));
}
// Prepare tracks for batch download
const tracksToDownload = requests.map((req) => ({
artist: req.artistName,
title: req.trackTitle,
album: req.albumTitle || "Unknown Album",
}));
try {
// Use Soulseek batch download
const batchResult = await soulseekService.searchAndDownloadBatch(
tracksToDownload,
musicPath,
settings?.soulseekConcurrentDownloads || 4 // concurrency
);
logger.debug(
`[Acquisition] Batch result: ${batchResult.successful}/${requests.length} tracks downloaded`
);
// Create individual results for each track
const results: AcquisitionResult[] = requests.map((req, index) => {
// Check if this track was in the successful list
// Note: We don't have per-track success info from batch, so we estimate
const success = index < batchResult.successful;
return {
success,
source: "soulseek" as const,
tracksDownloaded: success ? 1 : 0,
tracksTotal: 1,
error: success
? undefined
: batchResult.errors[index] || "Download failed",
};
});
return results;
} catch (error: any) {
logger.error(
`[Acquisition] Batch track download error: ${error.message}`
);
return requests.map(() => ({
success: false,
error: error.message,
}));
}
}
/**
 * Acquire album via Soulseek (track-by-track download).
 * Gets the track list from MusicBrainz (Last.fm fallback), then batch
 * downloads. Marks the job completed/failed immediately — Soulseek has no
 * webhook. Success threshold: at least 50% of tracks downloaded.
 *
 * FIX: the final metadata write now re-reads the job's current metadata
 * instead of spreading the stale `job.metadata` snapshot taken before
 * updateJobStatusText ran, which previously clobbered currentSource /
 * soulseekAttempts / statusText.
 *
 * @param request - Album to acquire (mbid required)
 * @param context - Tracking context
 * @returns Acquisition result
 */
private async acquireAlbumViaSoulseek(
  request: AlbumAcquisitionRequest,
  context: AcquisitionContext
): Promise<AcquisitionResult> {
  logger.debug(
    `[Acquisition/Soulseek] Downloading: ${request.artistName} - ${request.albumTitle}`
  );
  // Get music path
  const settings = await getSystemSettings();
  const musicPath = settings?.musicPath;
  if (!musicPath) {
    return { success: false, error: "Music path not configured" };
  }
  if (!request.mbid) {
    return {
      success: false,
      error: "Album MBID required for Soulseek download",
    };
  }
  let job: any;
  try {
    // Create download job at start for tracking
    job = await this.createDownloadJob(request, context);
    // Calculate attempt number (existing soulseek attempts + 1)
    const jobMetadata = (job.metadata as any) || {};
    const soulseekAttempts = (jobMetadata.soulseekAttempts || 0) + 1;
    await this.updateJobStatusText(job.id, "soulseek", soulseekAttempts);
    let tracks: Array<{ title: string; position?: number }>;
    // If specific tracks requested, use those instead of full album
    if (request.requestedTracks && request.requestedTracks.length > 0) {
      tracks = request.requestedTracks;
      logger.debug(
        `[Acquisition/Soulseek] Using ${tracks.length} requested tracks (not full album)`
      );
    } else {
      // Strategy 1: Get track list from MusicBrainz
      tracks = await musicBrainzService.getAlbumTracks(request.mbid);
      // Strategy 2: Fallback to Last.fm (always try when MusicBrainz fails)
      if (!tracks || tracks.length === 0) {
        logger.debug(
          `[Acquisition/Soulseek] MusicBrainz has no tracks, trying Last.fm`
        );
        try {
          const albumInfo = await lastFmService.getAlbumInfo(
            request.artistName,
            request.albumTitle
          );
          const lastFmTracks = albumInfo?.tracks?.track || [];
          if (Array.isArray(lastFmTracks) && lastFmTracks.length > 0) {
            tracks = lastFmTracks.map((t: any) => ({
              title: t.name || t.title,
              position: t["@attr"]?.rank
                ? parseInt(t["@attr"].rank)
                : undefined,
            }));
            logger.debug(
              `[Acquisition/Soulseek] Got ${tracks.length} tracks from Last.fm`
            );
          }
        } catch (lastfmError: any) {
          logger.warn(
            `[Acquisition/Soulseek] Last.fm fallback failed: ${lastfmError.message}`
          );
        }
      }
      if (!tracks || tracks.length === 0) {
        // Mark job as failed
        await this.updateJobStatus(
          job.id,
          "failed",
          "Could not get track list from MusicBrainz or Last.fm"
        );
        return {
          success: false,
          error: "Could not get track list from MusicBrainz or Last.fm",
        };
      }
      logger.debug(
        `[Acquisition/Soulseek] Found ${tracks.length} tracks for album`
      );
    }
    // Prepare tracks for batch download
    const tracksToDownload = tracks.map((track) => ({
      artist: request.artistName,
      title: track.title,
      album: request.albumTitle,
    }));
    // Use Soulseek batch download (parallel with concurrency limit)
    const batchResult = await soulseekService.searchAndDownloadBatch(
      tracksToDownload,
      musicPath,
      settings?.soulseekConcurrentDownloads || 4 // concurrency
    );
    if (batchResult.successful === 0) {
      // Mark job as failed
      await this.updateJobStatus(
        job.id,
        "failed",
        `No tracks found on Soulseek (searched ${tracks.length} tracks)`
      );
      return {
        success: false,
        tracksTotal: tracks.length,
        downloadJobId: parseInt(job.id),
        error: `No tracks found on Soulseek (searched ${tracks.length} tracks)`,
      };
    }
    // Success threshold: at least 50% of tracks
    const successThreshold = Math.ceil(tracks.length * 0.5);
    const isSuccess = batchResult.successful >= successThreshold;
    logger.debug(
      `[Acquisition/Soulseek] Downloaded ${batchResult.successful}/${tracks.length} tracks (threshold: ${successThreshold})`
    );
    // Mark job as completed immediately (Soulseek doesn't use webhooks)
    await this.updateJobStatus(
      job.id,
      isSuccess ? "completed" : "failed",
      isSuccess
        ? undefined
        : `Only ${batchResult.successful}/${tracks.length} tracks found`
    );
    // Re-read the job's current metadata before merging in track counts so
    // fields written by updateJobStatusText are not lost (bug fix).
    const current = await prisma.downloadJob.findUnique({
      where: { id: job.id },
      select: { metadata: true },
    });
    await prisma.downloadJob.update({
      where: { id: job.id },
      data: {
        metadata: {
          ...((current?.metadata as any) || {}),
          tracksDownloaded: batchResult.successful,
          tracksTotal: tracks.length,
        },
      },
    });
    return {
      success: isSuccess,
      source: "soulseek",
      downloadJobId: parseInt(job.id),
      tracksDownloaded: batchResult.successful,
      tracksTotal: tracks.length,
      error: isSuccess
        ? undefined
        : `Only ${batchResult.successful}/${tracks.length} tracks found`,
    };
  } catch (error: any) {
    logger.error(`[Acquisition/Soulseek] Error: ${error.message}`);
    // Update job status if job was created
    if (job) {
      await this.updateJobStatus(job.id, "failed", error.message).catch((e) =>
        logger.error(
          `[Acquisition/Soulseek] Failed to update job status: ${e.message}`
        )
      );
    }
    return { success: false, error: error.message };
  }
}
/**
* Acquire album via Lidarr (full album download)
* Creates download job and waits for webhook completion
*
* @param request - Album to acquire
* @param context - Tracking context
* @returns Acquisition result
*/
private async acquireAlbumViaLidarr(
request: AlbumAcquisitionRequest,
context: AcquisitionContext
): Promise<AcquisitionResult> {
logger.debug(
`[Acquisition/Lidarr] Downloading: ${request.artistName} - ${request.albumTitle}`
);
if (!request.mbid) {
return {
success: false,
error: "Album MBID required for Lidarr download",
};
}
let job: any;
try {
// Create download job
job = await this.createDownloadJob(request, context);
// Calculate attempt number (existing lidarr attempts + 1)
const jobMetadata = (job.metadata as any) || {};
const lidarrAttempts = (jobMetadata.lidarrAttempts || 0) + 1;
await this.updateJobStatusText(job.id, "lidarr", lidarrAttempts);
// Start Lidarr download
const isDiscovery = !!context.discoveryBatchId;
const result = await simpleDownloadManager.startDownload(
job.id,
request.artistName,
request.albumTitle,
request.mbid,
context.userId,
isDiscovery
);
if (result.success) {
logger.debug(
`[Acquisition/Lidarr] Download started (correlation: ${result.correlationId})`
);
return {
success: true,
source: "lidarr",
downloadJobId: parseInt(job.id),
correlationId: result.correlationId,
};
} else {
logger.error(
`[Acquisition/Lidarr] Failed to start: ${result.error}`
);
// Mark job as failed
await this.updateJobStatus(job.id, "failed", result.error);
// Return structured error info for fallback logic
return {
success: false,
error: result.error,
errorType: result.errorType,
isRecoverable: result.isRecoverable,
};
}
} catch (error: any) {
logger.error(`[Acquisition/Lidarr] Error: ${error.message}`);
// Update job status if job was created
if (job) {
await this.updateJobStatus(
job.id,
"failed",
error.message
).catch((e) =>
logger.error(
`[Acquisition/Lidarr] Failed to update job status: ${e.message}`
)
);
}
return { success: false, error: error.message };
}
}
/**
 * Create a DownloadJob for tracking an acquisition, or reuse an existing one
 * from the context. Links the job to its Discovery batch or Spotify import
 * when applicable.
 *
 * @param request - Album request
 * @param context - Tracking context (userId must be a valid string id)
 * @returns Created (or existing) download job
 * @throws Error when context.userId is missing or a known-bad sentinel value
 */
private async createDownloadJob(
  request: AlbumAcquisitionRequest,
  context: AcquisitionContext
): Promise<any> {
  if (context.existingJobId) {
    logger.debug(
      `[Acquisition] Using existing download job: ${context.existingJobId}`
    );
    return { id: context.existingJobId };
  }
  // Guard against bad userIds (prevents foreign-key constraint violations).
  const sentinelIds = ["NaN", "undefined", "null"];
  const userIdInvalid =
    !context.userId ||
    typeof context.userId !== "string" ||
    sentinelIds.includes(context.userId);
  if (userIdInvalid) {
    logger.error(
      `[Acquisition] Invalid userId in context: ${JSON.stringify({
        userId: context.userId,
        typeofUserId: typeof context.userId,
        albumTitle: request.albumTitle,
        artistName: request.artistName
      })}`
    );
    throw new Error(
      `Invalid userId in acquisition context: ${context.userId}`
    );
  }
  const metadata: any = {
    artistName: request.artistName,
    albumTitle: request.albumTitle,
    albumMbid: request.mbid,
  };
  const jobData: any = {
    userId: context.userId,
    subject: `${request.artistName} - ${request.albumTitle}`,
    type: "album",
    targetMbid: request.mbid || null,
    status: "pending",
    metadata,
  };
  // Context-based tracking: link back to the originating workflow.
  if (context.discoveryBatchId) {
    jobData.discoveryBatchId = context.discoveryBatchId;
    metadata.downloadType = "discovery";
  }
  if (context.spotifyImportJobId) {
    metadata.spotifyImportJobId = context.spotifyImportJobId;
    metadata.downloadType = "spotify_import";
  }
  const job = await prisma.downloadJob.create({ data: jobData });
  logger.debug(
    `[Acquisition] Created download job: ${job.id} (type: ${
      metadata.downloadType || "library"
    })`
  );
  return job;
}
/**
* Update download job status
*
* @param jobId - Job ID to update
* @param status - New status
* @param error - Optional error message
*/
private async updateJobStatus(
jobId: string,
status: string,
error?: string
): Promise<void> {
await prisma.downloadJob.update({
where: { id: jobId },
data: {
status,
error: error || null,
completedAt:
status === "completed" || status === "failed"
? new Date()
: undefined,
},
});
logger.debug(
`[Acquisition] Updated job ${jobId}: status=${status}${
error ? `, error=${error}` : ""
}`
);
}
}
// Export singleton instance
export const acquisitionService = new AcquisitionService();

View File

@@ -0,0 +1,232 @@
import { prisma } from "../utils/db";
import { logger } from "../utils/logger";
import { enrichmentFailureService } from "./enrichmentFailureService";
// A track stuck in "processing" longer than this is considered stale.
const STALE_THRESHOLD_MINUTES = 5;
// After this many stale resets a track is permanently marked "failed".
const MAX_RETRIES = 3;
const CIRCUIT_BREAKER_THRESHOLD = 30; // Increased from 10 to handle batch operations
const CIRCUIT_BREAKER_WINDOW_MS = 5 * 60 * 1000; // 5 minutes
// Circuit breaker states: closed = normal, open = paused, half-open = probing recovery.
type CircuitState = 'closed' | 'open' | 'half-open';
/**
 * Watchdog for the audio-analysis pipeline.
 *
 * Resets tracks stuck in "processing", permanently fails tracks that exceed
 * MAX_RETRIES, and maintains a circuit breaker (closed/open/half-open) so
 * callers can pause queuing new analysis work while the analyzer appears
 * unhealthy.
 */
class AudioAnalysisCleanupService {
  private state: CircuitState = 'closed';
  private failureCount = 0; // stale-track failures accumulated toward the breaker threshold
  private lastFailureTime: Date | null = null; // anchors the cooldown window
  /**
   * Check if we should attempt to transition from open to half-open
   * (i.e. the cooldown window has elapsed since the last recorded failure).
   */
  private shouldAttemptReset(): boolean {
    if (!this.lastFailureTime) return false;
    const timeSinceFailure = Date.now() - this.lastFailureTime.getTime();
    return timeSinceFailure >= CIRCUIT_BREAKER_WINDOW_MS;
  }
  /**
   * Handle successful operation - close circuit if in half-open state,
   * or reset the failure counter while closed.
   */
  private onSuccess(): void {
    if (this.state === 'half-open') {
      logger.info(
        `[AudioAnalysisCleanup] Circuit breaker CLOSED - recovery successful after ${this.failureCount} failures`
      );
      this.state = 'closed';
      this.failureCount = 0;
      this.lastFailureTime = null;
    } else if (this.state === 'closed' && this.failureCount > 0) {
      // Reset failure counter on success while closed
      logger.debug(
        "[AudioAnalysisCleanup] Resetting failure counter on success"
      );
      this.failureCount = 0;
      this.lastFailureTime = null;
    }
  }
  /**
   * Handle failed operation - update state and counts.
   *
   * @param resetCount - tracks reset to "pending" in this cleanup pass
   * @param permanentlyFailedCount - tracks marked "failed" in this pass
   */
  private onFailure(resetCount: number, permanentlyFailedCount: number): void {
    const totalFailures = resetCount + permanentlyFailedCount;
    this.failureCount += totalFailures;
    this.lastFailureTime = new Date();
    if (this.state === 'half-open') {
      // Failed during half-open - reopen circuit
      this.state = 'open';
      logger.warn(
        `[AudioAnalysisCleanup] Circuit breaker REOPENED - recovery attempt failed (${this.failureCount} total failures)`
      );
    } else if (this.failureCount >= CIRCUIT_BREAKER_THRESHOLD) {
      // Exceeded threshold - open circuit
      this.state = 'open';
      logger.warn(
        `[AudioAnalysisCleanup] Circuit breaker OPEN - ${this.failureCount} failures in window. ` +
        `Pausing audio analysis queuing until analyzer shows signs of life.`
      );
    }
  }
  /**
   * Check if circuit breaker is open (too many consecutive failures)
   * Automatically transitions to half-open after cooldown period.
   * Note: half-open reports "not open" so a single probe can go through.
   */
  isCircuitOpen(): boolean {
    if (this.state === 'open' && this.shouldAttemptReset()) {
      this.state = 'half-open';
      logger.info(
        `[AudioAnalysisCleanup] Circuit breaker HALF-OPEN - attempting recovery after ${
          CIRCUIT_BREAKER_WINDOW_MS / 60000
        } minute cooldown`
      );
    }
    return this.state === 'open';
  }
  /**
   * Record success for external callers (maintains backward compatibility)
   */
  recordSuccess(): void {
    this.onSuccess();
  }
  /**
   * Clean up tracks stuck in "processing" state.
   * Stale tracks are reset to "pending" for retry, or marked "failed" (and
   * recorded via enrichmentFailureService) once MAX_RETRIES is reached.
   * Failures feed the circuit breaker via onFailure().
   *
   * @returns number of tracks reset and permanently failed
   */
  async cleanupStaleProcessing(): Promise<{
    reset: number;
    permanentlyFailed: number;
  }> {
    const cutoff = new Date(
      Date.now() - STALE_THRESHOLD_MINUTES * 60 * 1000
    );
    // Find tracks stuck in processing: either started before the cutoff, or
    // never stamped with a start time and not updated since the cutoff.
    const staleTracks = await prisma.track.findMany({
      where: {
        analysisStatus: "processing",
        OR: [
          { analysisStartedAt: { lt: cutoff } },
          {
            analysisStartedAt: null,
            updatedAt: { lt: cutoff },
          },
        ],
      },
      include: {
        album: {
          include: {
            artist: { select: { name: true } },
          },
        },
      },
    });
    if (staleTracks.length === 0) {
      return { reset: 0, permanentlyFailed: 0 };
    }
    logger.debug(
      `[AudioAnalysisCleanup] Found ${staleTracks.length} stale tracks (processing > ${STALE_THRESHOLD_MINUTES} min)`
    );
    let resetCount = 0;
    let permanentlyFailedCount = 0;
    for (const track of staleTracks) {
      const newRetryCount = (track.analysisRetryCount || 0) + 1;
      const trackName = `${track.album.artist.name} - ${track.title}`;
      if (newRetryCount >= MAX_RETRIES) {
        // Permanently failed - mark as failed and record
        await prisma.track.update({
          where: { id: track.id },
          data: {
            analysisStatus: "failed",
            analysisError: `Exceeded ${MAX_RETRIES} retry attempts (stale processing)`,
            analysisRetryCount: newRetryCount,
            analysisStartedAt: null,
          },
        });
        // Record in EnrichmentFailure for user visibility
        await enrichmentFailureService.recordFailure({
          entityType: "audio",
          entityId: track.id,
          entityName: trackName,
          errorMessage: `Analysis timed out ${MAX_RETRIES} times - track may be corrupted or unsupported`,
          errorCode: "MAX_RETRIES_EXCEEDED",
          metadata: {
            filePath: track.filePath,
            retryCount: newRetryCount,
          },
        });
        logger.warn(
          `[AudioAnalysisCleanup] Permanently failed: ${trackName}`
        );
        permanentlyFailedCount++;
      } else {
        // Reset to pending for retry
        await prisma.track.update({
          where: { id: track.id },
          data: {
            analysisStatus: "pending",
            analysisStartedAt: null,
            analysisRetryCount: newRetryCount,
            analysisError: `Reset after stale processing (attempt ${newRetryCount}/${MAX_RETRIES})`,
          },
        });
        logger.debug(
          `[AudioAnalysisCleanup] Reset for retry (${newRetryCount}/${MAX_RETRIES}): ${trackName}`
        );
        resetCount++;
      }
    }
    // Update circuit breaker state
    if (resetCount > 0 || permanentlyFailedCount > 0) {
      this.onFailure(resetCount, permanentlyFailedCount);
      logger.debug(
        `[AudioAnalysisCleanup] Cleanup complete: ${resetCount} reset, ${permanentlyFailedCount} permanently failed`
      );
    }
    return { reset: resetCount, permanentlyFailed: permanentlyFailedCount };
  }
  /**
   * Get current analysis statistics
   * (counts per analysisStatus plus current circuit-breaker state).
   */
  async getStats(): Promise<{
    pending: number;
    processing: number;
    completed: number;
    failed: number;
    circuitOpen: boolean;
    circuitState: CircuitState;
    failureCount: number;
  }> {
    const [pending, processing, completed, failed] = await Promise.all([
      prisma.track.count({ where: { analysisStatus: "pending" } }),
      prisma.track.count({ where: { analysisStatus: "processing" } }),
      prisma.track.count({ where: { analysisStatus: "completed" } }),
      prisma.track.count({ where: { analysisStatus: "failed" } }),
    ]);
    return {
      pending,
      processing,
      completed,
      failed,
      circuitOpen: this.state === 'open',
      circuitState: this.state,
      failureCount: this.failureCount,
    };
  }
}
export const audioAnalysisCleanupService = new AudioAnalysisCleanupService();

View File

@@ -1,4 +1,5 @@
import * as fs from "fs";
import { logger } from "../utils/logger";
import * as path from "path";
import * as crypto from "crypto";
import { prisma } from "../utils/db";
@@ -50,7 +51,7 @@ export class AudioStreamingService {
// Start cache eviction timer (every 6 hours)
this.evictionInterval = setInterval(() => {
this.evictCache(this.transcodeCacheMaxGb).catch((err) => {
console.error("Cache eviction failed:", err);
logger.error("Cache eviction failed:", err);
});
}, 6 * 60 * 60 * 1000);
}
@@ -64,12 +65,12 @@ export class AudioStreamingService {
sourceModified: Date,
sourceAbsolutePath: string
): Promise<StreamFileInfo> {
console.log(`[AudioStreaming] Request: trackId=${trackId}, quality=${quality}, source=${path.basename(sourceAbsolutePath)}`);
logger.debug(`[AudioStreaming] Request: trackId=${trackId}, quality=${quality}, source=${path.basename(sourceAbsolutePath)}`);
// If original quality requested, return source file
if (quality === "original") {
const mimeType = this.getMimeType(sourceAbsolutePath);
console.log(`[AudioStreaming] Serving original: mimeType=${mimeType}`);
logger.debug(`[AudioStreaming] Serving original: mimeType=${mimeType}`);
return {
filePath: sourceAbsolutePath,
mimeType,
@@ -84,7 +85,7 @@ export class AudioStreamingService {
);
if (cachedPath) {
console.log(
logger.debug(
`[STREAM] Using cached transcode: ${quality} (${cachedPath})`
);
return {
@@ -103,7 +104,7 @@ export class AudioStreamingService {
: null;
if (sourceBitrate && sourceBitrate <= targetBitrate) {
console.log(
logger.debug(
`[STREAM] Source bitrate (${sourceBitrate}kbps) <= target (${targetBitrate}kbps), serving original`
);
return {
@@ -112,7 +113,7 @@ export class AudioStreamingService {
};
}
} catch (err) {
console.warn(
logger.warn(
`[STREAM] Failed to read source metadata, will transcode anyway:`,
err
);
@@ -122,7 +123,7 @@ export class AudioStreamingService {
// Need to transcode - check cache size first
const currentSize = await this.getCacheSize();
if (currentSize > this.transcodeCacheMaxGb * 0.9) {
console.log(
logger.debug(
`[STREAM] Cache near full (${currentSize.toFixed(
2
)}GB), evicting to 80%...`
@@ -131,7 +132,7 @@ export class AudioStreamingService {
}
// Transcode to cache
console.log(
logger.debug(
`[STREAM] Transcoding to ${quality} quality: ${sourceAbsolutePath}`
);
const transcodedPath = await this.transcodeToCache(
@@ -166,7 +167,7 @@ export class AudioStreamingService {
// Invalidate if source file was modified after transcode was created
if (cached.sourceModified < sourceModified) {
console.log(
logger.debug(
`[STREAM] Cache stale for track ${trackId}, removing...`
);
await prisma.transcodedFile.delete({ where: { id: cached.id } });
@@ -191,7 +192,7 @@ export class AudioStreamingService {
// Verify file exists
if (!fs.existsSync(fullPath)) {
console.log(`[STREAM] Cache file missing: ${fullPath}`);
logger.debug(`[STREAM] Cache file missing: ${fullPath}`);
await prisma.transcodedFile.delete({ where: { id: cached.id } });
return null;
}
@@ -274,7 +275,7 @@ export class AudioStreamingService {
},
});
console.log(
logger.debug(
`[STREAM] Transcode complete: ${cacheFileName} (${(
stats.size /
1024 /
@@ -322,13 +323,13 @@ export class AudioStreamingService {
* Evict cache using LRU until size is below target
*/
async evictCache(targetGb: number): Promise<void> {
console.log(`[CACHE] Starting eviction, target: ${targetGb}GB`);
logger.debug(`[CACHE] Starting eviction, target: ${targetGb}GB`);
let currentSize = await this.getCacheSize();
console.log(`[CACHE] Current size: ${currentSize.toFixed(2)}GB`);
logger.debug(`[CACHE] Current size: ${currentSize.toFixed(2)}GB`);
if (currentSize <= targetGb) {
console.log("[CACHE] Below target, no eviction needed");
logger.debug("[CACHE] Below target, no eviction needed");
return;
}
@@ -346,7 +347,7 @@ export class AudioStreamingService {
try {
await fs.promises.unlink(fullPath);
} catch (err) {
console.warn(`[CACHE] Failed to delete ${fullPath}:`, err);
logger.warn(`[CACHE] Failed to delete ${fullPath}:`, err);
}
// Delete from database
@@ -356,7 +357,7 @@ export class AudioStreamingService {
evicted++;
}
console.log(
logger.debug(
`[CACHE] Evicted ${evicted} files, new size: ${currentSize.toFixed(
2
)}GB`

View File

@@ -1,4 +1,5 @@
import { audiobookshelfService } from "./audiobookshelf";
import { logger } from "../utils/logger";
import { prisma } from "../utils/db";
import fs from "fs/promises";
import path from "path";
@@ -19,6 +20,7 @@ interface SyncResult {
export class AudiobookCacheService {
private coverCacheDir: string;
private coverCacheAvailable: boolean = false;
constructor() {
// Store covers in: <MUSIC_PATH>/cover-cache/audiobooks/
@@ -29,6 +31,23 @@ export class AudiobookCacheService {
);
}
/**
* Try to ensure cover cache directory exists
* Returns true if available, false if not (permissions issue)
*/
private async ensureCoverCacheDir(): Promise<boolean> {
try {
await fs.mkdir(this.coverCacheDir, { recursive: true });
this.coverCacheAvailable = true;
return true;
} catch (error: any) {
logger.warn(`[AUDIOBOOK] Cover cache directory unavailable: ${error.message}`);
logger.warn("[AUDIOBOOK] Covers will be served directly from Audiobookshelf");
this.coverCacheAvailable = false;
return false;
}
}
/**
* Sync all audiobooks from Audiobookshelf to our database
*/
@@ -41,15 +60,15 @@ export class AudiobookCacheService {
};
try {
console.log(" Starting audiobook sync from Audiobookshelf...");
logger.debug(" Starting audiobook sync from Audiobookshelf...");
// Ensure cover cache directory exists
await fs.mkdir(this.coverCacheDir, { recursive: true });
// Try to ensure cover cache directory exists (non-fatal if it fails)
await this.ensureCoverCacheDir();
// Fetch all audiobooks from Audiobookshelf
const audiobooks = await audiobookshelfService.getAllAudiobooks();
console.log(
logger.debug(
`[AUDIOBOOK] Found ${audiobooks.length} audiobooks in Audiobookshelf`
);
@@ -66,7 +85,7 @@ export class AudiobookCacheService {
metadata.author ||
book.author ||
"Unknown Author";
console.log(` Synced: ${title} by ${author}`);
logger.debug(` Synced: ${title} by ${author}`);
} catch (error: any) {
result.failed++;
const metadata = book.media?.metadata || book;
@@ -74,23 +93,23 @@ export class AudiobookCacheService {
metadata.title || book.title || "Unknown Title";
const errorMsg = `Failed to sync ${title}: ${error.message}`;
result.errors.push(errorMsg);
console.error(` ${errorMsg}`);
logger.error(` ${errorMsg}`);
}
}
console.log("\nSync Summary:");
console.log(` Synced: ${result.synced}`);
console.log(` Failed: ${result.failed}`);
console.log(` Skipped: ${result.skipped}`);
logger.debug("\nSync Summary:");
logger.debug(` Synced: ${result.synced}`);
logger.debug(` Failed: ${result.failed}`);
logger.debug(` Skipped: ${result.skipped}`);
if (result.errors.length > 0) {
console.log("\n[ERRORS]:");
result.errors.forEach((err) => console.log(` - ${err}`));
logger.debug("\n[ERRORS]:");
result.errors.forEach((err) => logger.debug(` - ${err}`));
}
return result;
} catch (error: any) {
console.error(" Audiobook sync failed:", error);
logger.error(" Audiobook sync failed:", error);
throw error;
}
}
@@ -106,7 +125,7 @@ export class AudiobookCacheService {
// Skip if no title (invalid audiobook data)
if (!title) {
console.warn(` Skipping audiobook ${book.id} - missing title`);
logger.warn(` Skipping audiobook ${book.id} - missing title`);
return;
}
@@ -187,7 +206,7 @@ export class AudiobookCacheService {
// Log series info for debugging (only for first few books)
if (series) {
console.log(
logger.debug(
` [Series] "${title}" -> "${series}" #${
seriesSequence || "?"
}`
@@ -281,7 +300,7 @@ export class AudiobookCacheService {
return null;
} catch (error: any) {
console.error(
logger.error(
"Failed to get Audiobookshelf base URL:",
error.message
);
@@ -291,11 +310,17 @@ export class AudiobookCacheService {
/**
* Download a cover image and save it locally
* Returns null if cover caching is not available (permissions issue)
*/
private async downloadCover(
audiobookId: string,
coverUrl: string
): Promise<string> {
): Promise<string | null> {
// Skip cover download if cache directory is not available
if (!this.coverCacheAvailable) {
return null;
}
try {
// Get API key for authentication
const { getSystemSettings } = await import(
@@ -327,11 +352,11 @@ export class AudiobookCacheService {
return filePath;
} catch (error: any) {
console.error(
logger.error(
`Failed to download cover for ${audiobookId}:`,
error.message
);
return null as any; // Return null if download fails
return null;
}
}
@@ -350,7 +375,7 @@ export class AudiobookCacheService {
audiobook.lastSyncedAt <
new Date(Date.now() - 7 * 24 * 60 * 60 * 1000)
) {
console.log(
logger.debug(
`[AUDIOBOOK] Audiobook ${audiobookId} not cached or stale, syncing...`
);
try {
@@ -362,13 +387,13 @@ export class AudiobookCacheService {
where: { id: audiobookId },
});
} catch (syncError: any) {
console.warn(
logger.warn(
` Failed to sync audiobook ${audiobookId} from Audiobookshelf:`,
syncError.message
);
// If we have stale cached data, return it anyway
if (audiobook) {
console.log(
logger.debug(
` Using stale cached data for ${audiobookId}`
);
} else {
@@ -387,6 +412,13 @@ export class AudiobookCacheService {
* Clean up old cached covers that are no longer in database
*/
async cleanupOrphanedCovers(): Promise<number> {
// Ensure cache directory is available
const available = await this.ensureCoverCacheDir();
if (!available) {
logger.warn("[AUDIOBOOK] Cannot cleanup covers - cache directory unavailable");
return 0;
}
const audiobooks = await prisma.audiobook.findMany({
select: { localCoverPath: true },
});
@@ -398,14 +430,18 @@ export class AudiobookCacheService {
);
let deleted = 0;
const files = await fs.readdir(this.coverCacheDir);
try {
const files = await fs.readdir(this.coverCacheDir);
for (const file of files) {
if (!validCoverPaths.has(file)) {
await fs.unlink(path.join(this.coverCacheDir, file));
deleted++;
console.log(` [DELETE] Deleted orphaned cover: ${file}`);
for (const file of files) {
if (!validCoverPaths.has(file)) {
await fs.unlink(path.join(this.coverCacheDir, file));
deleted++;
logger.debug(` [DELETE] Deleted orphaned cover: ${file}`);
}
}
} catch (error: any) {
logger.warn(`[AUDIOBOOK] Failed to read cover cache directory: ${error.message}`);
}
return deleted;

View File

@@ -1,5 +1,7 @@
import axios, { AxiosInstance } from "axios";
import { logger } from "../utils/logger";
import { getSystemSettings } from "../utils/systemSettings";
import { prisma } from "../utils/db";
/**
* Audiobookshelf API Service
@@ -33,13 +35,13 @@ class AudiobookshelfService {
this.baseUrl = settings.audiobookshelfUrl.replace(/\/$/, ""); // Remove trailing slash
this.apiKey = settings.audiobookshelfApiKey;
this.client = axios.create({
baseURL: this.baseUrl,
baseURL: this.baseUrl as string,
headers: {
Authorization: `Bearer ${this.apiKey}`,
},
timeout: 30000, // 30 seconds for remote server
});
console.log("Audiobookshelf configured from database");
logger.debug("Audiobookshelf configured from database");
this.initialized = true;
return;
}
@@ -47,7 +49,7 @@ class AudiobookshelfService {
if (error.message === "Audiobookshelf is disabled in settings") {
throw error;
}
console.log(
logger.debug(
" Could not load Audiobookshelf from database, checking .env"
);
}
@@ -66,7 +68,7 @@ class AudiobookshelfService {
},
timeout: 30000, // 30 seconds for remote server
});
console.log("Audiobookshelf configured from .env");
logger.debug("Audiobookshelf configured from .env");
this.initialized = true;
} else {
throw new Error("Audiobookshelf not configured");
@@ -82,7 +84,7 @@ class AudiobookshelfService {
const response = await this.client!.get("/api/libraries");
return response.status === 200;
} catch (error) {
console.error("Audiobookshelf connection failed:", error);
logger.error("Audiobookshelf connection failed:", error);
return false;
}
}
@@ -122,16 +124,22 @@ class AudiobookshelfService {
// DEBUG: Log the structure of the first item with series
if (items.length > 0) {
const itemsWithSeries = items.filter((item: any) =>
item.media?.metadata?.series || item.media?.metadata?.seriesName
const itemsWithSeries = items.filter(
(item: any) =>
item.media?.metadata?.series ||
item.media?.metadata?.seriesName
);
if (itemsWithSeries.length > 0) {
console.log(
logger.debug(
"[AUDIOBOOKSHELF DEBUG] Sample item WITH series:",
JSON.stringify(itemsWithSeries[0], null, 2).substring(0, 2000)
JSON.stringify(
itemsWithSeries[0],
null,
2
).substring(0, 2000)
);
} else {
console.log(
logger.debug(
"[AUDIOBOOKSHELF DEBUG] No items with series found! Sample item:",
JSON.stringify(items[0], null, 2).substring(0, 1000)
);
@@ -169,7 +177,7 @@ class AudiobookshelfService {
try {
return await this.getLibraryItems(library.id);
} catch (error) {
console.error(
logger.error(
`Audiobookshelf: failed to load podcast library ${library.id}`,
error
);
@@ -330,6 +338,119 @@ class AudiobookshelfService {
);
return response.data.book || [];
}
/**
* Sync audiobooks from Audiobookshelf to local database cache
* This populates the Audiobook table for full-text search
*/
async syncAudiobooksToCache() {
await this.ensureInitialized();
logger.debug("[AUDIOBOOKSHELF] Starting audiobook sync to cache...");
try {
// Fetch all audiobooks from Audiobookshelf API
const audiobooks = await this.getAllAudiobooks();
logger.debug(
`[AUDIOBOOKSHELF] Found ${audiobooks.length} audiobooks to sync`
);
// Map and upsert each audiobook to database
let syncedCount = 0;
for (const item of audiobooks) {
try {
const metadata = item.media?.metadata || {};
// Extract series information (check both possible formats)
let series: string | null = null;
let seriesSequence: string | null = null;
if (metadata.series && Array.isArray(metadata.series) && metadata.series.length > 0) {
series = metadata.series[0].name || null;
seriesSequence = metadata.series[0].sequence || null;
} else if (metadata.seriesName) {
series = metadata.seriesName;
seriesSequence = metadata.seriesSequence || null;
}
await prisma.audiobook.upsert({
where: { id: item.id },
update: {
title: metadata.title || "Untitled",
author: metadata.authorName || metadata.author || null,
narrator: metadata.narratorName || metadata.narrator || null,
description: metadata.description || null,
publishedYear: metadata.publishedYear
? parseInt(metadata.publishedYear, 10)
: null,
publisher: metadata.publisher || null,
series,
seriesSequence,
duration: item.media?.duration || null,
numTracks: item.media?.numTracks || null,
numChapters: item.media?.numChapters || null,
size: item.media?.size
? BigInt(item.media.size)
: null,
isbn: metadata.isbn || null,
asin: metadata.asin || null,
language: metadata.language || null,
genres: metadata.genres || [],
tags: item.media?.tags || [],
coverUrl: metadata.coverPath
? `${this.baseUrl}${metadata.coverPath}`
: null,
audioUrl: `${this.baseUrl}/api/items/${item.id}/play`,
libraryId: item.libraryId || null,
lastSyncedAt: new Date(),
},
create: {
id: item.id,
title: metadata.title || "Untitled",
author: metadata.authorName || metadata.author || null,
narrator: metadata.narratorName || metadata.narrator || null,
description: metadata.description || null,
publishedYear: metadata.publishedYear
? parseInt(metadata.publishedYear, 10)
: null,
publisher: metadata.publisher || null,
series,
seriesSequence,
duration: item.media?.duration || null,
numTracks: item.media?.numTracks || null,
numChapters: item.media?.numChapters || null,
size: item.media?.size
? BigInt(item.media.size)
: null,
isbn: metadata.isbn || null,
asin: metadata.asin || null,
language: metadata.language || null,
genres: metadata.genres || [],
tags: item.media?.tags || [],
coverUrl: metadata.coverPath
? `${this.baseUrl}${metadata.coverPath}`
: null,
audioUrl: `${this.baseUrl}/api/items/${item.id}/play`,
libraryId: item.libraryId || null,
},
});
syncedCount++;
} catch (error) {
logger.error(
`[AUDIOBOOKSHELF] Failed to sync audiobook ${item.id}:`,
error
);
}
}
logger.debug(
`[AUDIOBOOKSHELF] Successfully synced ${syncedCount}/${audiobooks.length} audiobooks to cache`
);
return { synced: syncedCount, total: audiobooks.length };
} catch (error) {
logger.error("[AUDIOBOOKSHELF] Audiobook sync failed:", error);
throw error;
}
}
}
export const audiobookshelfService = new AudiobookshelfService();

View File

@@ -1,4 +1,5 @@
import axios from "axios";
import { logger } from "../utils/logger";
import { redisClient } from "../utils/redis";
import { rateLimiter } from "./rateLimiter";
@@ -13,7 +14,7 @@ class CoverArtService {
if (cached === "NOT_FOUND") return null; // Cached negative result
if (cached) return cached;
} catch (err) {
console.warn("Redis get error:", err);
logger.warn("Redis get error:", err);
}
try {
@@ -35,7 +36,7 @@ class CoverArtService {
try {
await redisClient.setEx(cacheKey, 2592000, coverUrl); // 30 days
} catch (err) {
console.warn("Redis set error:", err);
logger.warn("Redis set error:", err);
}
return coverUrl;
@@ -57,7 +58,7 @@ class CoverArtService {
}
return null;
}
console.error(`Cover art error for ${rgMbid}:`, error.message);
logger.error(`Cover art error for ${rgMbid}:`, error.message);
}
return null;

View File

@@ -1,4 +1,5 @@
import * as fs from "fs";
import { logger } from "../utils/logger";
import * as path from "path";
import * as crypto from "crypto";
import { parseFile } from "music-metadata";
@@ -44,13 +45,13 @@ export class CoverArtExtractor {
// Save to cache
await fs.promises.writeFile(cachePath, picture.data);
console.log(
logger.debug(
`[COVER-ART] Extracted cover art from ${path.basename(audioFilePath)}: ${cacheFileName}`
);
return cacheFileName;
} catch (err) {
console.error(
logger.error(
`[COVER-ART] Failed to extract from ${audioFilePath}:`,
err
);

View File

@@ -10,6 +10,7 @@
* - All fetched data is persisted for future use
*/
import { logger } from "../utils/logger";
import { prisma } from "../utils/db";
import { redisClient } from "../utils/redis";
import { fanartService } from "./fanart";
@@ -38,15 +39,16 @@ class DataCacheService {
try {
const artist = await prisma.artist.findUnique({
where: { id: artistId },
select: { heroUrl: true },
select: { heroUrl: true, userHeroUrl: true },
});
if (artist?.heroUrl) {
const displayHeroUrl = artist?.userHeroUrl ?? artist?.heroUrl;
if (displayHeroUrl) {
// Also populate Redis for faster future reads
this.setRedisCache(cacheKey, artist.heroUrl, ARTIST_IMAGE_TTL);
return artist.heroUrl;
this.setRedisCache(cacheKey, displayHeroUrl, ARTIST_IMAGE_TTL);
return displayHeroUrl;
}
} catch (err) {
console.warn("[DataCache] DB lookup failed for artist:", artistId);
logger.warn("[DataCache] DB lookup failed for artist:", artistId);
}
// 2. Check Redis cache
@@ -98,7 +100,7 @@ class DataCacheService {
return album.coverUrl;
}
} catch (err) {
console.warn("[DataCache] DB lookup failed for album:", albumId);
logger.warn("[DataCache] DB lookup failed for album:", albumId);
}
// 2. Check Redis cache
@@ -155,14 +157,15 @@ class DataCacheService {
* Only returns what's already cached, doesn't make API calls
*/
async getArtistImagesBatch(
artists: Array<{ id: string; heroUrl?: string | null }>
artists: Array<{ id: string; heroUrl?: string | null; userHeroUrl?: string | null }>
): Promise<Map<string, string | null>> {
const results = new Map<string, string | null>();
// First, use any heroUrls already in the data
// First, use any heroUrls/userHeroUrls already in the data (with override pattern)
for (const artist of artists) {
if (artist.heroUrl) {
results.set(artist.id, artist.heroUrl);
const displayHeroUrl = artist.userHeroUrl ?? artist.heroUrl;
if (displayHeroUrl) {
results.set(artist.id, displayHeroUrl);
}
}
@@ -242,7 +245,7 @@ class DataCacheService {
try {
heroUrl = await fanartService.getArtistImage(mbid);
if (heroUrl) {
console.log(`[DataCache] Got image from Fanart.tv for ${artistName}`);
logger.debug(`[DataCache] Got image from Fanart.tv for ${artistName}`);
return heroUrl;
}
} catch (err) {
@@ -254,7 +257,7 @@ class DataCacheService {
try {
heroUrl = await deezerService.getArtistImage(artistName);
if (heroUrl) {
console.log(`[DataCache] Got image from Deezer for ${artistName}`);
logger.debug(`[DataCache] Got image from Deezer for ${artistName}`);
return heroUrl;
}
} catch (err) {
@@ -275,7 +278,7 @@ class DataCacheService {
// Filter out Last.fm placeholder images
const imageUrl = largestImage["#text"];
if (!imageUrl.includes("2a96cbd8b46e442fc41c2b86b821562f")) {
console.log(`[DataCache] Got image from Last.fm for ${artistName}`);
logger.debug(`[DataCache] Got image from Last.fm for ${artistName}`);
return imageUrl;
}
}
@@ -284,7 +287,7 @@ class DataCacheService {
// Last.fm failed
}
console.log(`[DataCache] No image found for ${artistName}`);
logger.debug(`[DataCache] No image found for ${artistName}`);
return null;
}
@@ -298,7 +301,7 @@ class DataCacheService {
data: { heroUrl },
});
} catch (err) {
console.warn("[DataCache] Failed to update artist heroUrl:", err);
logger.warn("[DataCache] Failed to update artist heroUrl:", err);
}
}
@@ -312,7 +315,7 @@ class DataCacheService {
data: { coverUrl },
});
} catch (err) {
console.warn("[DataCache] Failed to update album coverUrl:", err);
logger.warn("[DataCache] Failed to update album coverUrl:", err);
}
}
@@ -327,12 +330,32 @@ class DataCacheService {
}
}
/**
* Set multiple Redis cache entries using pipelining
* Uses MULTI/EXEC for atomic batch writes
*/
private async setRedisCacheBatch(
entries: Array<{ key: string; value: string; ttl: number }>
): Promise<void> {
if (entries.length === 0) return;
try {
const multi = redisClient.multi();
for (const { key, value, ttl } of entries) {
multi.setEx(key, ttl, value);
}
await multi.exec();
} catch (err) {
logger.warn("[DataCache] Batch cache write failed:", err);
}
}
/**
* Warm up Redis cache from database
* Called on server startup
*/
async warmupCache(): Promise<void> {
console.log("[DataCache] Warming up Redis cache from database...");
logger.debug("[DataCache] Warming up Redis cache from database...");
try {
// Warm up artist images
@@ -341,14 +364,16 @@ class DataCacheService {
select: { id: true, heroUrl: true },
});
let artistCount = 0;
for (const artist of artists) {
if (artist.heroUrl) {
await this.setRedisCache(`hero:${artist.id}`, artist.heroUrl, ARTIST_IMAGE_TTL);
artistCount++;
}
}
console.log(`[DataCache] Cached ${artistCount} artist images`);
const artistEntries = artists
.filter((a) => a.heroUrl)
.map((a) => ({
key: `hero:${a.id}`,
value: a.heroUrl!,
ttl: ARTIST_IMAGE_TTL,
}));
await this.setRedisCacheBatch(artistEntries);
logger.debug(`[DataCache] Cached ${artistEntries.length} artist images`);
// Warm up album covers
const albums = await prisma.album.findMany({
@@ -356,18 +381,20 @@ class DataCacheService {
select: { id: true, coverUrl: true },
});
let albumCount = 0;
for (const album of albums) {
if (album.coverUrl) {
await this.setRedisCache(`album-cover:${album.id}`, album.coverUrl, ALBUM_COVER_TTL);
albumCount++;
}
}
console.log(`[DataCache] Cached ${albumCount} album covers`);
const albumEntries = albums
.filter((a) => a.coverUrl)
.map((a) => ({
key: `album-cover:${a.id}`,
value: a.coverUrl!,
ttl: ALBUM_COVER_TTL,
}));
console.log("[DataCache] Cache warmup complete");
await this.setRedisCacheBatch(albumEntries);
logger.debug(`[DataCache] Cached ${albumEntries.length} album covers`);
logger.debug("[DataCache] Cache warmup complete");
} catch (err) {
console.error("[DataCache] Cache warmup failed:", err);
logger.error("[DataCache] Cache warmup failed:", err);
}
}
}

View File

@@ -1,4 +1,5 @@
import axios from "axios";
import { logger } from "../utils/logger";
import { redisClient } from "../utils/redis";
/**
@@ -91,7 +92,7 @@ class DeezerService {
*/
private async setCache(key: string, value: string): Promise<void> {
try {
await redisClient.setex(`${this.cachePrefix}${key}`, this.cacheTTL, value);
await redisClient.setEx(`${this.cachePrefix}${key}`, this.cacheTTL, value);
} catch {
// Ignore cache errors
}
@@ -121,7 +122,7 @@ class DeezerService {
await this.setCache(cacheKey, imageUrl || "null");
return imageUrl;
} catch (error: any) {
console.error(`Deezer artist image error for ${artistName}:`, error.message);
logger.error(`Deezer artist image error for ${artistName}:`, error.message);
return null;
}
}
@@ -157,7 +158,7 @@ class DeezerService {
await this.setCache(cacheKey, coverUrl || "null");
return coverUrl;
} catch (error: any) {
console.error(`Deezer album cover error for ${artistName} - ${albumName}:`, error.message);
logger.error(`Deezer album cover error for ${artistName} - ${albumName}:`, error.message);
return null;
}
}
@@ -182,7 +183,7 @@ class DeezerService {
await this.setCache(cacheKey, previewUrl || "null");
return previewUrl;
} catch (error: any) {
console.error(`Deezer track preview error for ${artistName} - ${trackName}:`, error.message);
logger.error(`Deezer track preview error for ${artistName} - ${trackName}:`, error.message);
return null;
}
}
@@ -218,7 +219,7 @@ class DeezerService {
*/
async getPlaylist(playlistId: string): Promise<DeezerPlaylist | null> {
try {
console.log(`Deezer: Fetching playlist ${playlistId}...`);
logger.debug(`Deezer: Fetching playlist ${playlistId}...`);
const response = await axios.get(`${DEEZER_API}/playlist/${playlistId}`, {
timeout: 15000,
@@ -226,7 +227,7 @@ class DeezerService {
const data = response.data;
if (data.error) {
console.error("Deezer API error:", data.error);
logger.error("Deezer API error:", data.error);
return null;
}
@@ -242,7 +243,7 @@ class DeezerService {
coverUrl: track.album?.cover_medium || track.album?.cover || null,
}));
console.log(`Deezer: Fetched playlist "${data.title}" with ${tracks.length} tracks`);
logger.debug(`Deezer: Fetched playlist "${data.title}" with ${tracks.length} tracks`);
return {
id: String(data.id),
@@ -255,7 +256,7 @@ class DeezerService {
isPublic: data.public ?? true,
};
} catch (error: any) {
console.error("Deezer playlist fetch error:", error.message);
logger.error("Deezer playlist fetch error:", error.message);
return null;
}
}
@@ -280,7 +281,7 @@ class DeezerService {
fans: playlist.fans || 0,
}));
} catch (error: any) {
console.error("Deezer chart playlists error:", error.message);
logger.error("Deezer chart playlists error:", error.message);
return [];
}
}
@@ -305,7 +306,7 @@ class DeezerService {
fans: 0,
}));
} catch (error: any) {
console.error("Deezer playlist search error:", error.message);
logger.error("Deezer playlist search error:", error.message);
return [];
}
}
@@ -319,7 +320,7 @@ class DeezerService {
const cacheKey = `playlists:featured:${limit}`;
const cached = await this.getCached(cacheKey);
if (cached) {
console.log("Deezer: Returning cached featured playlists");
logger.debug("Deezer: Returning cached featured playlists");
return JSON.parse(cached);
}
@@ -328,7 +329,7 @@ class DeezerService {
const seenIds = new Set<string>();
// 1. Get chart playlists (max 99 available)
console.log("Deezer: Fetching chart playlists from API...");
logger.debug("Deezer: Fetching chart playlists from API...");
const chartPlaylists = await this.getChartPlaylists(Math.min(limit, 99));
for (const p of chartPlaylists) {
if (!seenIds.has(p.id)) {
@@ -336,7 +337,7 @@ class DeezerService {
allPlaylists.push(p);
}
}
console.log(`Deezer: Got ${chartPlaylists.length} chart playlists`);
logger.debug(`Deezer: Got ${chartPlaylists.length} chart playlists`);
// 2. If we need more, search for popular genre playlists
if (allPlaylists.length < limit) {
@@ -360,11 +361,11 @@ class DeezerService {
}
const result = allPlaylists.slice(0, limit);
console.log(`Deezer: Caching ${result.length} featured playlists`);
logger.debug(`Deezer: Caching ${result.length} featured playlists`);
await this.setCache(cacheKey, JSON.stringify(result));
return result;
} catch (error: any) {
console.error("Deezer featured playlists error:", error.message);
logger.error("Deezer featured playlists error:", error.message);
return [];
}
}
@@ -380,12 +381,12 @@ class DeezerService {
const cacheKey = "genres:all";
const cached = await this.getCached(cacheKey);
if (cached) {
console.log("Deezer: Returning cached genres");
logger.debug("Deezer: Returning cached genres");
return JSON.parse(cached);
}
try {
console.log("Deezer: Fetching genres from API...");
logger.debug("Deezer: Fetching genres from API...");
const response = await axios.get(`${DEEZER_API}/genre`, {
timeout: 10000,
});
@@ -398,11 +399,11 @@ class DeezerService {
imageUrl: genre.picture_medium || genre.picture || null,
}));
console.log(`Deezer: Caching ${genres.length} genres`);
logger.debug(`Deezer: Caching ${genres.length} genres`);
await this.setCache(cacheKey, JSON.stringify(genres));
return genres;
} catch (error: any) {
console.error("Deezer genres error:", error.message);
logger.error("Deezer genres error:", error.message);
return [];
}
}
@@ -426,12 +427,12 @@ class DeezerService {
const cacheKey = "radio:stations";
const cached = await this.getCached(cacheKey);
if (cached) {
console.log("Deezer: Returning cached radio stations");
logger.debug("Deezer: Returning cached radio stations");
return JSON.parse(cached);
}
try {
console.log("Deezer: Fetching radio stations from API...");
logger.debug("Deezer: Fetching radio stations from API...");
const response = await axios.get(`${DEEZER_API}/radio`, {
timeout: 10000,
});
@@ -444,11 +445,11 @@ class DeezerService {
type: "radio" as const,
}));
console.log(`Deezer: Got ${stations.length} radio stations, caching...`);
logger.debug(`Deezer: Got ${stations.length} radio stations, caching...`);
await this.setCache(cacheKey, JSON.stringify(stations));
return stations;
} catch (error: any) {
console.error("Deezer radio stations error:", error.message);
logger.error("Deezer radio stations error:", error.message);
return [];
}
}
@@ -464,12 +465,12 @@ class DeezerService {
const cacheKey = "radio:by-genre";
const cached = await this.getCached(cacheKey);
if (cached) {
console.log("Deezer: Returning cached radios by genre");
logger.debug("Deezer: Returning cached radios by genre");
return JSON.parse(cached);
}
try {
console.log("Deezer: Fetching radios by genre from API...");
logger.debug("Deezer: Fetching radios by genre from API...");
const response = await axios.get(`${DEEZER_API}/radio/genres`, {
timeout: 10000,
});
@@ -486,11 +487,11 @@ class DeezerService {
})),
}));
console.log(`Deezer: Got ${genres.length} genre categories with radios, caching...`);
logger.debug(`Deezer: Got ${genres.length} genre categories with radios, caching...`);
await this.setCache(cacheKey, JSON.stringify(genres));
return genres;
} catch (error: any) {
console.error("Deezer radios by genre error:", error.message);
logger.error("Deezer radios by genre error:", error.message);
return [];
}
}
@@ -500,7 +501,7 @@ class DeezerService {
*/
async getRadioTracks(radioId: string): Promise<DeezerPlaylist | null> {
try {
console.log(`Deezer: Fetching radio ${radioId} tracks...`);
logger.debug(`Deezer: Fetching radio ${radioId} tracks...`);
// First get radio info
const infoResponse = await axios.get(`${DEEZER_API}/radio/${radioId}`, {
@@ -526,7 +527,7 @@ class DeezerService {
coverUrl: track.album?.cover_medium || track.album?.cover || null,
}));
console.log(`Deezer: Fetched radio "${radioInfo.title}" with ${tracks.length} tracks`);
logger.debug(`Deezer: Fetched radio "${radioInfo.title}" with ${tracks.length} tracks`);
return {
id: `radio-${radioId}`,
@@ -539,7 +540,7 @@ class DeezerService {
isPublic: true,
};
} catch (error: any) {
console.error("Deezer radio tracks error:", error.message);
logger.error("Deezer radio tracks error:", error.message);
return null;
}
}
@@ -578,7 +579,7 @@ class DeezerService {
return { playlists, radios };
} catch (error: any) {
console.error("Deezer editorial content error:", error.message);
logger.error("Deezer editorial content error:", error.message);
return { playlists: [], radios: [] };
}
}

File diff suppressed because it is too large Load Diff

View File

@@ -1,4 +1,5 @@
import * as fs from "fs";
import { logger } from "../utils/logger";
import * as path from "path";
/**
@@ -59,7 +60,7 @@ class DiscoveryLogger {
}
// Also write to console for real-time visibility
console.log(message);
logger.debug(message);
}
/**

View File

@@ -1,3 +1,5 @@
import { logger } from "../utils/logger";
interface DownloadInfo {
downloadId: string;
albumTitle: string;
@@ -72,15 +74,15 @@ class DownloadQueueManager {
};
this.activeDownloads.set(downloadId, info);
console.log(
logger.debug(
`[DOWNLOAD] Started: "${albumTitle}" by ${artistName} (${downloadId})`
);
console.log(` Album MBID: ${albumMbid}`);
console.log(` Active downloads: ${this.activeDownloads.size}`);
logger.debug(` Album MBID: ${albumMbid}`);
logger.debug(` Active downloads: ${this.activeDownloads.size}`);
// Persist Lidarr download reference to download job for later status updates
this.linkDownloadJob(downloadId, albumMbid).catch((error) => {
console.error(` linkDownloadJob error:`, error);
logger.error(` linkDownloadJob error:`, error);
});
// Start timeout on first download
@@ -108,12 +110,12 @@ class DownloadQueueManager {
*/
async completeDownload(downloadId: string, albumTitle: string) {
this.activeDownloads.delete(downloadId);
console.log(`Download complete: "${albumTitle}" (${downloadId})`);
console.log(` Remaining downloads: ${this.activeDownloads.size}`);
logger.debug(`Download complete: "${albumTitle}" (${downloadId})`);
logger.debug(` Remaining downloads: ${this.activeDownloads.size}`);
// If no more downloads, trigger refresh immediately
if (this.activeDownloads.size === 0) {
console.log(`⏰ All downloads complete! Starting refresh now...`);
logger.debug(`⏰ All downloads complete! Starting refresh now...`);
this.clearTimeout();
this.triggerFullRefresh();
}
@@ -125,29 +127,29 @@ class DownloadQueueManager {
async failDownload(downloadId: string, reason: string) {
const info = this.activeDownloads.get(downloadId);
if (!info) {
console.log(
logger.debug(
` Download ${downloadId} not tracked, ignoring failure`
);
return;
}
console.log(` Download failed: "${info.albumTitle}" (${downloadId})`);
console.log(` Reason: ${reason}`);
console.log(` Attempt ${info.attempts}/${this.MAX_RETRY_ATTEMPTS}`);
logger.debug(` Download failed: "${info.albumTitle}" (${downloadId})`);
logger.debug(` Reason: ${reason}`);
logger.debug(` Attempt ${info.attempts}/${this.MAX_RETRY_ATTEMPTS}`);
// Check if we should retry
if (info.attempts < this.MAX_RETRY_ATTEMPTS) {
info.attempts++;
console.log(` Retrying download... (attempt ${info.attempts})`);
logger.debug(` Retrying download... (attempt ${info.attempts})`);
await this.retryDownload(info);
} else {
console.log(` ⛔ Max retry attempts reached, giving up`);
logger.debug(` ⛔ Max retry attempts reached, giving up`);
await this.cleanupFailedAlbum(info);
this.activeDownloads.delete(downloadId);
// Check if all downloads are done
if (this.activeDownloads.size === 0) {
console.log(
logger.debug(
`⏰ All downloads finished (some failed). Starting refresh...`
);
this.clearTimeout();
@@ -162,7 +164,7 @@ class DownloadQueueManager {
private async retryDownload(info: DownloadInfo) {
try {
if (!info.albumId) {
console.log(` No album ID, cannot retry`);
logger.debug(` No album ID, cannot retry`);
return;
}
@@ -176,7 +178,7 @@ class DownloadQueueManager {
!settings.lidarrUrl ||
!settings.lidarrApiKey
) {
console.log(` Lidarr not configured`);
logger.debug(` Lidarr not configured`);
return;
}
@@ -195,9 +197,9 @@ class DownloadQueueManager {
}
);
console.log(` Retry search triggered in Lidarr`);
logger.debug(` Retry search triggered in Lidarr`);
} catch (error: any) {
console.log(` Failed to retry: ${error.message}`);
logger.debug(` Failed to retry: ${error.message}`);
}
}
@@ -206,7 +208,7 @@ class DownloadQueueManager {
*/
private async cleanupFailedAlbum(info: DownloadInfo) {
try {
console.log(` Cleaning up failed album: ${info.albumTitle}`);
logger.debug(` Cleaning up failed album: ${info.albumTitle}`);
const { getSystemSettings } = await import(
"../utils/systemSettings"
@@ -233,9 +235,9 @@ class DownloadQueueManager {
timeout: 10000,
}
);
console.log(` Removed album from Lidarr`);
logger.debug(` Removed album from Lidarr`);
} catch (error: any) {
console.log(` Failed to remove album: ${error.message}`);
logger.debug(` Failed to remove album: ${error.message}`);
}
}
@@ -264,27 +266,27 @@ class DownloadQueueManager {
timeout: 10000,
}
);
console.log(
logger.debug(
` Removed artist from Lidarr (no other albums)`
);
}
} catch (error: any) {
console.log(
logger.debug(
` Failed to check/remove artist: ${error.message}`
);
}
}
// Mark as failed in Discovery database
// Mark as deleted in Discovery database (closest to failed status)
const { prisma } = await import("../utils/db");
await prisma.discoveryAlbum.updateMany({
where: { albumTitle: info.albumTitle },
data: { status: "FAILED" },
data: { status: "DELETED" },
});
console.log(` Marked as failed in database`);
logger.debug(` Marked as failed in database`);
// Notify callbacks about unavailable album
console.log(
logger.debug(
` [NOTIFY] Notifying ${this.unavailableCallbacks.length} callbacks about unavailable album`
);
for (const callback of this.unavailableCallbacks) {
@@ -299,11 +301,11 @@ class DownloadQueueManager {
similarity: info.similarity,
});
} catch (error: any) {
console.log(` Callback error: ${error.message}`);
logger.debug(` Callback error: ${error.message}`);
}
}
} catch (error: any) {
console.log(` Cleanup error: ${error.message}`);
logger.debug(` Cleanup error: ${error.message}`);
}
}
@@ -312,20 +314,20 @@ class DownloadQueueManager {
*/
private startTimeout() {
const timeoutMs = this.TIMEOUT_MINUTES * 60 * 1000;
console.log(
logger.debug(
`[TIMER] Starting ${this.TIMEOUT_MINUTES}-minute timeout for automatic scan`
);
this.timeoutTimer = setTimeout(() => {
if (this.activeDownloads.size > 0) {
console.log(
logger.debug(
`\n Timeout reached! ${this.activeDownloads.size} downloads still pending.`
);
console.log(` These downloads never completed:`);
logger.debug(` These downloads never completed:`);
// Mark each pending download as failed to trigger callbacks
for (const [downloadId, info] of this.activeDownloads) {
console.log(
logger.debug(
` - ${info.albumTitle} by ${info.artistName}`
);
// This will trigger the unavailable album callback
@@ -333,14 +335,14 @@ class DownloadQueueManager {
downloadId,
"Download timeout - never completed"
).catch((err) => {
console.error(
logger.error(
`Error failing download ${downloadId}:`,
err
);
});
}
console.log(
logger.debug(
` Triggering scan anyway to process completed downloads...\n`
);
} else {
@@ -364,27 +366,27 @@ class DownloadQueueManager {
*/
private async triggerFullRefresh() {
try {
console.log("\n Starting full library refresh...\n");
logger.debug("\n Starting full library refresh...\n");
// Step 1: Clear failed imports from Lidarr
console.log("[1/2] Checking for failed imports in Lidarr...");
logger.debug("[1/2] Checking for failed imports in Lidarr...");
await this.clearFailedLidarrImports();
// Step 2: Trigger Lidify library sync
console.log("[2/2] Triggering Lidify library sync...");
logger.debug("[2/2] Triggering Lidify library sync...");
const lidifySuccess = await this.triggerLidifySync();
if (!lidifySuccess) {
console.error(" Lidify sync failed");
logger.error(" Lidify sync failed");
return;
}
console.log("Lidify sync started");
console.log(
logger.debug("Lidify sync started");
logger.debug(
"\n[SUCCESS] Full library refresh complete! New music should appear shortly.\n"
);
} catch (error) {
console.error(" Library refresh error:", error);
logger.error(" Library refresh error:", error);
}
}
@@ -399,7 +401,7 @@ class DownloadQueueManager {
const settings = await getSystemSettings();
if (!settings.lidarrEnabled || !settings.lidarrUrl) {
console.log(" Lidarr not configured, skipping");
logger.debug(" Lidarr not configured, skipping");
return;
}
@@ -408,7 +410,7 @@ class DownloadQueueManager {
// Get Lidarr API key
const apiKey = settings.lidarrApiKey;
if (!apiKey) {
console.log(" Lidarr API key not found, skipping");
logger.debug(" Lidarr API key not found, skipping");
return;
}
@@ -433,11 +435,11 @@ class DownloadQueueManager {
);
if (failed.length === 0) {
console.log(" No failed imports found");
logger.debug(" No failed imports found");
return;
}
console.log(` Found ${failed.length} failed import(s)`);
logger.debug(` Found ${failed.length} failed import(s)`);
for (const item of failed) {
const artistName =
@@ -445,7 +447,7 @@ class DownloadQueueManager {
const albumTitle =
item.album?.title || item.album?.name || "Unknown Album";
console.log(` ${artistName} - ${albumTitle}`);
logger.debug(` ${artistName} - ${albumTitle}`);
try {
// Remove from queue, blocklist, and trigger search
@@ -474,22 +476,22 @@ class DownloadQueueManager {
timeout: 10000,
}
);
console.log(
logger.debug(
` → Blocklisted and searching for alternative`
);
} else {
console.log(
logger.debug(
` → Blocklisted (no album ID for re-search)`
);
}
} catch (error: any) {
console.log(` Failed to process: ${error.message}`);
logger.debug(` Failed to process: ${error.message}`);
}
}
console.log(` Cleared ${failed.length} failed import(s)`);
logger.debug(` Cleared ${failed.length} failed import(s)`);
} catch (error: any) {
console.log(` Failed to check Lidarr queue: ${error.message}`);
logger.debug(` Failed to check Lidarr queue: ${error.message}`);
}
}
@@ -501,12 +503,12 @@ class DownloadQueueManager {
const { scanQueue } = await import("../workers/queues");
const { prisma } = await import("../utils/db");
console.log(" Starting library scan...");
logger.debug(" Starting library scan...");
// Get first user for scanning
const firstUser = await prisma.user.findFirst();
if (!firstUser) {
console.error(` No users found in database, cannot scan`);
logger.error(` No users found in database, cannot scan`);
return false;
}
@@ -516,10 +518,10 @@ class DownloadQueueManager {
source: "download-queue",
});
console.log("Library scan queued");
logger.debug("Library scan queued");
return true;
} catch (error: any) {
console.error("Lidify sync trigger error:", error.message);
logger.error("Lidify sync trigger error:", error.message);
return false;
}
}
@@ -546,7 +548,7 @@ class DownloadQueueManager {
* Manually trigger a full refresh (for testing or manual triggers)
*/
async manualRefresh() {
console.log("\n Manual refresh triggered...\n");
logger.debug("\n Manual refresh triggered...\n");
await this.triggerFullRefresh();
}
@@ -561,7 +563,7 @@ class DownloadQueueManager {
for (const [downloadId, info] of this.activeDownloads) {
const age = now - info.startTime;
if (age > this.STALE_TIMEOUT_MS) {
console.log(
logger.debug(
`[CLEANUP] Cleaning up stale download: "${
info.albumTitle
}" (${downloadId}) - age: ${Math.round(
@@ -574,7 +576,7 @@ class DownloadQueueManager {
}
if (cleanedCount > 0) {
console.log(
logger.debug(
`[CLEANUP] Cleaned up ${cleanedCount} stale download(s)`
);
}
@@ -582,6 +584,71 @@ class DownloadQueueManager {
return cleanedCount;
}
/**
* Reconcile in-memory state with database on startup
* - Mark stale jobs (>30 min without update) as failed
* - Load active/processing jobs into memory
*/
async reconcileOnStartup(): Promise<{ loaded: number; failed: number }> {
const { prisma } = await import("../utils/db");
const staleThreshold = new Date(Date.now() - this.STALE_TIMEOUT_MS);
// Mark stale processing jobs as failed
const staleResult = await prisma.downloadJob.updateMany({
where: {
status: "processing",
startedAt: { lt: staleThreshold }
},
data: {
status: "failed",
error: "Server restart - download was processing but never completed"
}
});
logger.debug(`[DOWNLOAD] Marked ${staleResult.count} stale downloads as failed`);
// Load recent processing jobs into memory (not stale)
const activeJobs = await prisma.downloadJob.findMany({
where: {
status: "processing",
startedAt: { gte: staleThreshold }
},
select: {
id: true,
subject: true,
targetMbid: true,
lidarrRef: true,
metadata: true,
startedAt: true,
attempts: true
}
});
// Populate in-memory map from database
for (const job of activeJobs) {
const metadata = job.metadata as Record<string, any> || {};
this.activeDownloads.set(job.lidarrRef || job.id, {
downloadId: job.lidarrRef || job.id,
albumTitle: job.subject,
albumMbid: job.targetMbid,
artistName: metadata.artistName || "Unknown",
artistMbid: metadata.artistMbid,
albumId: metadata.lidarrAlbumId,
artistId: metadata.lidarrArtistId,
attempts: job.attempts,
startTime: job.startedAt?.getTime() || Date.now(),
userId: metadata.userId,
tier: metadata.tier,
similarity: metadata.similarity
});
}
logger.debug(`[DOWNLOAD] Loaded ${activeJobs.length} active downloads from database`);
return { loaded: activeJobs.length, failed: staleResult.count };
}
/**
* Shutdown the download queue manager (cleanup resources)
*/
@@ -592,14 +659,14 @@ class DownloadQueueManager {
}
this.clearTimeout();
this.activeDownloads.clear();
console.log("Download queue manager shutdown");
logger.debug("Download queue manager shutdown");
}
/**
* Link Lidarr download IDs to download jobs (so we can mark them completed later)
*/
private async linkDownloadJob(downloadId: string, albumMbid: string) {
console.log(
logger.debug(
` [LINK] Attempting to link download job for MBID: ${albumMbid}`
);
try {
@@ -615,7 +682,7 @@ class DownloadQueueManager {
targetMbid: true,
},
});
console.log(
logger.debug(
` [LINK] Found ${existingJobs.length} job(s) with this MBID:`,
JSON.stringify(existingJobs, null, 2)
);
@@ -629,27 +696,28 @@ class DownloadQueueManager {
data: {
lidarrRef: downloadId,
status: "processing",
startedAt: new Date(),
},
});
if (result.count === 0) {
console.log(
logger.debug(
` No matching download jobs found to link with Lidarr ID ${downloadId}`
);
console.log(
logger.debug(
` This means either: no job exists, job already has lidarrRef, or status is not pending/processing`
);
} else {
console.log(
logger.debug(
` Linked Lidarr download ${downloadId} to ${result.count} download job(s)`
);
}
} catch (error: any) {
console.error(
logger.error(
` Failed to persist Lidarr download link:`,
error.message
);
console.error(` Error details:`, error);
logger.error(` Error details:`, error);
}
}
}

View File

@@ -14,6 +14,7 @@
* - Manual override support
*/
import { logger } from "../utils/logger";
import { prisma } from "../utils/db";
import { lastFmService } from "./lastfm";
import { musicBrainzService } from "./musicbrainz";
@@ -171,7 +172,7 @@ export class EnrichmentService {
throw new Error(`Artist ${artistId} not found`);
}
console.log(`Enriching artist: ${artist.name}`);
logger.debug(`Enriching artist: ${artist.name}`);
const enrichmentData: ArtistEnrichmentData = {
confidence: 0,
@@ -190,10 +191,10 @@ export class EnrichmentService {
if (mbResults.length > 0) {
enrichmentData.mbid = mbResults[0].id;
enrichmentData.confidence += 0.4;
console.log(` Found MBID: ${enrichmentData.mbid}`);
logger.debug(` Found MBID: ${enrichmentData.mbid}`);
}
} catch (error) {
console.error(` MusicBrainz lookup failed:`, error);
logger.error(` MusicBrainz lookup failed:`, error);
}
}
@@ -214,7 +215,7 @@ export class EnrichmentService {
lastfmInfo.tags?.tag?.map((t: any) => t.name) || [];
enrichmentData.genres = enrichmentData.tags?.slice(0, 3); // Top 3 tags as genres
enrichmentData.confidence += 0.3;
console.log(
logger.debug(
` Found Last.fm data: ${
enrichmentData.tags?.length || 0
} tags`
@@ -228,10 +229,10 @@ export class EnrichmentService {
enrichmentData.similarArtists = similar.map(
(a: any) => a.name
);
console.log(` Found ${similar.length} similar artists`);
logger.debug(` Found ${similar.length} similar artists`);
}
} catch (error) {
console.error(
logger.error(
` ✗ Last.fm lookup failed:`,
error instanceof Error ? error.message : error
);
@@ -251,16 +252,16 @@ export class EnrichmentService {
if (imageResult) {
enrichmentData.heroUrl = imageResult.url;
enrichmentData.confidence += 0.2;
console.log(` Found artist image from ${imageResult.source}`);
logger.debug(` Found artist image from ${imageResult.source}`);
}
} catch (error) {
console.error(
logger.error(
` ✗ Artist image lookup failed:`,
error instanceof Error ? error.message : error
);
}
console.log(
logger.debug(
` Enrichment confidence: ${(
enrichmentData.confidence * 100
).toFixed(0)}%`
@@ -294,7 +295,7 @@ export class EnrichmentService {
throw new Error(`Album ${albumId} not found`);
}
console.log(
logger.debug(
`[Enrichment] Processing album: ${album.artist.name} - ${album.title}`
);
@@ -335,7 +336,7 @@ export class EnrichmentService {
? new Date(match["first-release-date"])
: undefined;
enrichmentData.confidence += 0.5;
console.log(` Found MBID: ${enrichmentData.rgMbid}`);
logger.debug(` Found MBID: ${enrichmentData.rgMbid}`);
// Try to get label info from first release
try {
@@ -355,18 +356,18 @@ export class EnrichmentService {
) {
enrichmentData.label =
releaseInfo["label-info"][0].label.name;
console.log(
logger.debug(
` Found label: ${enrichmentData.label}`
);
}
}
} catch (error) {
console.log(`Could not fetch label info`);
logger.debug(`Could not fetch label info`);
}
}
}
} catch (error) {
console.error(` MusicBrainz lookup failed:`, error);
logger.error(` MusicBrainz lookup failed:`, error);
}
}
@@ -375,8 +376,7 @@ export class EnrichmentService {
try {
const lastfmInfo = await lastFmService.getAlbumInfo(
album.artist.name,
album.title,
enrichmentData.rgMbid
album.title
);
if (lastfmInfo) {
@@ -386,14 +386,14 @@ export class EnrichmentService {
enrichmentData.trackCount =
lastfmInfo.tracks?.track?.length;
enrichmentData.confidence += 0.3;
console.log(
logger.debug(
` Found Last.fm data: ${
enrichmentData.tags?.length || 0
} tags`
);
}
} catch (error) {
console.error(` Last.fm lookup failed:`, error);
logger.error(` Last.fm lookup failed:`, error);
}
}
@@ -408,16 +408,16 @@ export class EnrichmentService {
if (coverResult) {
enrichmentData.coverUrl = coverResult.url;
enrichmentData.confidence += 0.2;
console.log(` Found cover art from ${coverResult.source}`);
logger.debug(` Found cover art from ${coverResult.source}`);
}
} catch (error) {
console.error(
logger.error(
` ✗ Cover art lookup failed:`,
error instanceof Error ? error.message : error
);
}
console.log(
logger.debug(
` Enrichment confidence: ${(
enrichmentData.confidence * 100
).toFixed(0)}%`
@@ -443,7 +443,7 @@ export class EnrichmentService {
});
if (existingArtist && existingArtist.id !== artistId) {
console.log(
logger.debug(
`MBID ${data.mbid} already used by "${existingArtist.name}", skipping MBID update`
);
} else {
@@ -462,7 +462,7 @@ export class EnrichmentService {
where: { id: artistId },
data: updateData,
});
console.log(
logger.debug(
` Saved ${data.genres?.length || 0} genres for artist`
);
}
@@ -480,6 +480,9 @@ export class EnrichmentService {
if (data.rgMbid) updateData.rgMbid = data.rgMbid;
if (data.coverUrl) updateData.coverUrl = data.coverUrl;
if (data.releaseDate) {
// Store original release date in dedicated field
updateData.originalYear = data.releaseDate.getFullYear();
// Also update year for backward compatibility (but originalYear takes precedence)
updateData.year = data.releaseDate.getFullYear();
}
if (data.label) updateData.label = data.label;
@@ -492,7 +495,7 @@ export class EnrichmentService {
where: { id: albumId },
data: updateData,
});
console.log(
logger.debug(
` Saved album data: ${
data.genres?.length || 0
} genres, label: ${data.label || "none"}`
@@ -565,7 +568,7 @@ export class EnrichmentService {
},
});
console.log(`Starting enrichment for ${artists.length} artists...`);
logger.debug(`Starting enrichment for ${artists.length} artists...`);
for (const artist of artists) {
try {
@@ -634,7 +637,7 @@ export class EnrichmentService {
item: `${artist.name} - ${album.title}`,
error: error.message,
});
console.error(
logger.error(
` ✗ Failed to enrich ${artist.name} - ${album.title}:`,
error
);
@@ -649,11 +652,11 @@ export class EnrichmentService {
item: artist.name,
error: error.message,
});
console.error(` Failed to enrich ${artist.name}:`, error);
logger.error(` Failed to enrich ${artist.name}:`, error);
}
}
console.log(
logger.debug(
`Enrichment complete: ${result.itemsEnriched}/${result.itemsProcessed} items enriched`
);

View File

@@ -0,0 +1,354 @@
/**
* Enrichment Failure Service
*
* Tracks and manages failures during artist/track/audio enrichment.
* Provides visibility into what failed and allows selective retry.
*/
import { logger } from "../utils/logger";
import { prisma } from "../utils/db";
/** A persisted enrichment-failure record as exposed to callers of the service. */
export interface EnrichmentFailure {
    id: string;
    // Which enrichment kind failed: artist metadata, track metadata, or audio analysis.
    entityType: "artist" | "track" | "audio";
    // ID of the referenced entity ("audio" failures reference tracks, like "track").
    entityId: string;
    entityName: string | null;
    errorMessage: string | null;
    errorCode: string | null;
    // Attempts recorded so far; recordFailure caps this at maxRetries.
    retryCount: number;
    maxRetries: number;
    firstFailedAt: Date;
    lastFailedAt: Date;
    // Skipped failures are excluded from automatic retry.
    skipped: boolean;
    skippedAt: Date | null;
    // Resolved failures were fixed (manually or via cleanup) and kept for history.
    resolved: boolean;
    resolvedAt: Date | null;
    // Free-form JSON context captured at failure time (JSON-serializable only).
    metadata: any;
}
/** Input for EnrichmentFailureService.recordFailure(). */
export interface RecordFailureInput {
    entityType: "artist" | "track" | "audio";
    entityId: string;
    entityName?: string;
    errorMessage: string;
    errorCode?: string;
    metadata?: any;
}
/** Filtering and pagination options for EnrichmentFailureService.getFailures(). */
export interface GetFailuresOptions {
    entityType?: "artist" | "track" | "audio";
    // Include failures marked skipped (defaults to false in getFailures).
    includeSkipped?: boolean;
    // Include failures marked resolved (defaults to false in getFailures).
    includeResolved?: boolean;
    limit?: number;
    offset?: number;
}
class EnrichmentFailureService {
    /**
     * Record a failure (or increment retry count if already exists).
     *
     * Upserts on the (entityType, entityId) unique pair: an existing row gets
     * refreshed error details and a retry count bumped (capped at maxRetries
     * to prevent unbounded increment); otherwise a new row starts at 1.
     */
    async recordFailure(input: RecordFailureInput): Promise<EnrichmentFailure> {
        const {
            entityType,
            entityId,
            entityName,
            errorMessage,
            errorCode,
            metadata,
        } = input;
        // Try to find existing failure
        const existing = await prisma.enrichmentFailure.findUnique({
            where: {
                entityType_entityId: {
                    entityType,
                    entityId,
                },
            },
        });
        if (existing) {
            // Cap retry count at maxRetries to prevent unbounded increment
            const newRetryCount = Math.min(
                existing.retryCount + 1,
                existing.maxRetries
            );
            return await prisma.enrichmentFailure.update({
                where: { id: existing.id },
                data: {
                    errorMessage,
                    errorCode,
                    retryCount: newRetryCount,
                    lastFailedAt: new Date(),
                    // JSON round-trip strips non-serializable values before persisting
                    metadata: metadata
                        ? JSON.parse(JSON.stringify(metadata))
                        : existing.metadata,
                },
            }) as EnrichmentFailure;
        } else {
            // Create new failure
            return await prisma.enrichmentFailure.create({
                data: {
                    entityType,
                    entityId,
                    entityName,
                    errorMessage,
                    errorCode,
                    retryCount: 1,
                    maxRetries: 3,
                    metadata: metadata
                        ? JSON.parse(JSON.stringify(metadata))
                        : null,
                },
            }) as EnrichmentFailure;
        }
    }
    /**
     * Get failures with filtering and pagination.
     * Skipped/resolved rows are excluded unless explicitly included.
     */
    async getFailures(options: GetFailuresOptions = {}): Promise<{
        failures: EnrichmentFailure[];
        total: number;
    }> {
        const {
            entityType,
            includeSkipped = false,
            includeResolved = false,
            limit = 100,
            offset = 0,
        } = options;
        const where: any = {};
        if (entityType) {
            where.entityType = entityType;
        }
        if (!includeSkipped) {
            where.skipped = false;
        }
        if (!includeResolved) {
            where.resolved = false;
        }
        // Page of rows and the total count share the same filter.
        const [failures, total] = await Promise.all([
            prisma.enrichmentFailure.findMany({
                where,
                orderBy: { lastFailedAt: "desc" },
                take: limit,
                skip: offset,
            }),
            prisma.enrichmentFailure.count({ where }),
        ]);
        return { failures: failures as unknown as EnrichmentFailure[], total };
    }
    /**
     * Get counts of open (not resolved, not skipped) failures by type.
     */
    async getFailureCounts(): Promise<{
        artist: number;
        track: number;
        audio: number;
        total: number;
    }> {
        const [artistCount, trackCount, audioCount] = await Promise.all([
            prisma.enrichmentFailure.count({
                where: {
                    entityType: "artist",
                    resolved: false,
                    skipped: false,
                },
            }),
            prisma.enrichmentFailure.count({
                where: { entityType: "track", resolved: false, skipped: false },
            }),
            prisma.enrichmentFailure.count({
                where: { entityType: "audio", resolved: false, skipped: false },
            }),
        ]);
        return {
            artist: artistCount,
            track: trackCount,
            audio: audioCount,
            total: artistCount + trackCount + audioCount,
        };
    }
    /**
     * Get a single failure by ID (null if not found).
     */
    async getFailure(id: string): Promise<EnrichmentFailure | null> {
        return await prisma.enrichmentFailure.findUnique({
            where: { id },
        }) as unknown as EnrichmentFailure | null;
    }
    /**
     * Mark failures as skipped (won't be retried automatically).
     * @returns number of rows updated
     */
    async skipFailures(ids: string[]): Promise<number> {
        const result = await prisma.enrichmentFailure.updateMany({
            where: { id: { in: ids } },
            data: {
                skipped: true,
                skippedAt: new Date(),
            },
        });
        return result.count;
    }
    /**
     * Mark failures as resolved (manually fixed).
     * @returns number of rows updated
     */
    async resolveFailures(ids: string[]): Promise<number> {
        const result = await prisma.enrichmentFailure.updateMany({
            where: { id: { in: ids } },
            data: {
                resolved: true,
                resolvedAt: new Date(),
            },
        });
        return result.count;
    }
    /**
     * Reset retry count for failures (prepare for retry).
     * @returns number of rows updated
     */
    async resetRetryCount(ids: string[]): Promise<number> {
        const result = await prisma.enrichmentFailure.updateMany({
            where: { id: { in: ids } },
            data: {
                retryCount: 0,
            },
        });
        return result.count;
    }
    /**
     * Delete failures (cleanup resolved/old failures).
     * @returns number of rows deleted
     */
    async deleteFailures(ids: string[]): Promise<number> {
        const result = await prisma.enrichmentFailure.deleteMany({
            where: { id: { in: ids } },
        });
        return result.count;
    }
    /**
     * Cleanup resolved failures older than the given number of days.
     * @returns number of rows deleted
     */
    async cleanupOldResolved(olderThanDays: number = 30): Promise<number> {
        const cutoffDate = new Date();
        cutoffDate.setDate(cutoffDate.getDate() - olderThanDays);
        const result = await prisma.enrichmentFailure.deleteMany({
            where: {
                resolved: true,
                resolvedAt: {
                    lt: cutoffDate,
                },
            },
        });
        logger.debug(
            `[Enrichment Failures] Cleaned up ${result.count} old resolved failures`
        );
        return result.count;
    }
    /**
     * Check if an entity has failed too many times (retryCount >= maxRetries).
     * Returns false when no failure record exists.
     */
    async hasExceededRetries(
        entityType: string,
        entityId: string
    ): Promise<boolean> {
        const failure = await prisma.enrichmentFailure.findUnique({
            where: {
                entityType_entityId: {
                    entityType: entityType as any,
                    entityId,
                },
            },
        });
        if (!failure) return false;
        return failure.retryCount >= failure.maxRetries;
    }
    /**
     * Clear failure record (reset for fresh retry).
     */
    async clearFailure(entityType: string, entityId: string): Promise<void> {
        await prisma.enrichmentFailure.deleteMany({
            where: {
                entityType: entityType as any,
                entityId,
            },
        });
    }
    /**
     * Clean up failures for entities that no longer exist in the database.
     * This resolves orphaned failure records where the track/artist was deleted.
     *
     * Existence is verified with two batched lookups (artists, tracks)
     * instead of one query per failure, avoiding N+1 round trips on large
     * failure lists. "audio" failures reference tracks, same as "track".
     */
    async cleanupOrphanedFailures(): Promise<{
        cleaned: number;
        checked: number;
    }> {
        // Get all unresolved failures
        const failures = await prisma.enrichmentFailure.findMany({
            where: { resolved: false, skipped: false },
            select: { id: true, entityType: true, entityId: true },
        });
        // Partition entity IDs by the table they live in.
        const artistIds: string[] = [];
        const trackIds: string[] = [];
        for (const failure of failures) {
            if (failure.entityType === "artist") {
                artistIds.push(failure.entityId);
            } else if (
                failure.entityType === "track" ||
                failure.entityType === "audio"
            ) {
                trackIds.push(failure.entityId);
            }
        }
        // Batched existence checks (skip a query entirely when a bucket is empty).
        const [artists, tracks] = await Promise.all([
            artistIds.length > 0
                ? prisma.artist.findMany({
                      where: { id: { in: artistIds } },
                      select: { id: true },
                  })
                : Promise.resolve([] as { id: string }[]),
            trackIds.length > 0
                ? prisma.track.findMany({
                      where: { id: { in: trackIds } },
                      select: { id: true },
                  })
                : Promise.resolve([] as { id: string }[]),
        ]);
        const existingArtistIds = new Set(artists.map((a) => a.id));
        const existingTrackIds = new Set(tracks.map((t) => t.id));
        const toResolve: string[] = [];
        for (const failure of failures) {
            let exists = false;
            if (failure.entityType === "artist") {
                exists = existingArtistIds.has(failure.entityId);
            } else if (
                failure.entityType === "track" ||
                failure.entityType === "audio"
            ) {
                exists = existingTrackIds.has(failure.entityId);
            }
            if (!exists) {
                toResolve.push(failure.id);
            }
        }
        if (toResolve.length > 0) {
            await this.resolveFailures(toResolve);
            logger.debug(
                `[Enrichment Failures] Cleaned up ${toResolve.length} orphaned failures`
            );
        }
        return { cleaned: toResolve.length, checked: failures.length };
    }
}
// Singleton instance
export const enrichmentFailureService = new EnrichmentFailureService();

View File

@@ -0,0 +1,267 @@
/**
* Enrichment State Management Service
*
* Manages the state of enrichment processes using Redis for cross-process coordination.
* Allows pause/resume/stop controls and tracks current progress.
*/
import { logger } from "../utils/logger";
import Redis from "ioredis";
import { config } from "../config";
// Redis key holding the serialized EnrichmentState JSON.
const ENRICHMENT_STATE_KEY = "enrichment:state";
// Pub/sub channels used to broadcast pause/resume/stop to workers.
const ENRICHMENT_CONTROL_CHANNEL = "enrichment:control";
const AUDIO_CONTROL_CHANNEL = "audio:analysis:control";
// Lifecycle of the enrichment run; "stopping" transitions to "idle" after workers drain.
export type EnrichmentStatus = "idle" | "running" | "paused" | "stopping";
// Which stage of enrichment is active (null when none).
export type EnrichmentPhase = "artists" | "tracks" | "audio" | null;
/** Full enrichment run state persisted in Redis (must stay JSON-serializable). */
export interface EnrichmentState {
    status: EnrichmentStatus;
    // ISO-8601 timestamps (stored as strings because the state is JSON in Redis).
    startedAt?: string;
    pausedAt?: string;
    stoppedAt?: string;
    currentPhase: EnrichmentPhase;
    // Refreshed on every setState(); used by detectHang() to spot stalls.
    lastActivity: string;
    completionNotificationSent?: boolean; // Prevent repeated completion notifications
    // Extra context surfaced while status === "stopping".
    stoppingInfo?: {
        phase: string;
        currentItem: string;
        itemsRemaining: number;
    };
    // Progress tracking
    artists: {
        total: number;
        completed: number;
        failed: number;
        current?: string; // Currently processing artist name
    };
    tracks: {
        total: number;
        completed: number;
        failed: number;
        current?: string; // Currently processing track
    };
    audio: {
        total: number;
        completed: number;
        failed: number;
        processing: number; // Currently in worker pool
    };
}
class EnrichmentStateService {
private redis: Redis;
private publisher: Redis;
constructor() {
this.redis = new Redis(config.redisUrl);
this.publisher = new Redis(config.redisUrl);
}
/**
* Get current enrichment state
*/
async getState(): Promise<EnrichmentState | null> {
const data = await this.redis.get(ENRICHMENT_STATE_KEY);
if (!data) {
return null;
}
return JSON.parse(data);
}
/**
* Initialize enrichment state
*/
async initializeState(): Promise<EnrichmentState> {
const state: EnrichmentState = {
status: "running",
startedAt: new Date().toISOString(),
currentPhase: "artists",
lastActivity: new Date().toISOString(),
completionNotificationSent: false, // Reset notification flag on new enrichment
artists: { total: 0, completed: 0, failed: 0 },
tracks: { total: 0, completed: 0, failed: 0 },
audio: { total: 0, completed: 0, failed: 0, processing: 0 },
};
await this.setState(state);
return state;
}
/**
* Update enrichment state
*/
async setState(state: EnrichmentState): Promise<void> {
state.lastActivity = new Date().toISOString();
await this.redis.set(ENRICHMENT_STATE_KEY, JSON.stringify(state));
}
/**
* Update specific fields in state
* Auto-initializes state if it doesn't exist
*/
async updateState(
updates: Partial<EnrichmentState>
): Promise<EnrichmentState> {
let current = await this.getState();
// Auto-initialize if state doesn't exist
if (!current) {
logger.debug("[Enrichment State] State not found, initializing...");
current = await this.initializeState();
}
const updated = { ...current, ...updates };
await this.setState(updated);
return updated;
}
/**
* Pause enrichment process
*/
async pause(): Promise<EnrichmentState> {
const state = await this.getState();
if (!state) {
throw new Error("No active enrichment to pause");
}
if (state.status !== "running") {
throw new Error(`Cannot pause enrichment in ${state.status} state`);
}
const updated = await this.updateState({
status: "paused",
pausedAt: new Date().toISOString(),
});
// Notify workers via pub/sub
await this.publisher.publish(ENRICHMENT_CONTROL_CHANNEL, "pause");
await this.publisher.publish(AUDIO_CONTROL_CHANNEL, "pause");
logger.debug("[Enrichment State] Paused");
return updated;
}
/**
* Resume enrichment process
*/
async resume(): Promise<EnrichmentState> {
const state = await this.getState();
if (!state) {
throw new Error("No enrichment state to resume");
}
// Idempotent: If already running, return success
if (state.status === "running") {
logger.debug("[Enrichment State] Already running");
return state;
}
if (state.status !== "paused") {
throw new Error(
`Cannot resume enrichment in ${state.status} state`
);
}
const updated = await this.updateState({
status: "running",
pausedAt: undefined,
});
// Notify workers via pub/sub
await this.publisher.publish(ENRICHMENT_CONTROL_CHANNEL, "resume");
await this.publisher.publish(AUDIO_CONTROL_CHANNEL, "resume");
logger.debug("[Enrichment State] Resumed");
return updated;
}
/**
* Stop enrichment process
*/
async stop(): Promise<EnrichmentState> {
const state = await this.getState();
if (!state) {
throw new Error("No active enrichment to stop");
}
// Idempotent: If already idle, return success
if (state.status === "idle") {
logger.debug("[Enrichment State] Already stopped (idle)");
return state;
}
const updated = await this.updateState({
status: "stopping",
stoppedAt: new Date().toISOString(),
});
// Notify workers via pub/sub
await this.publisher.publish(ENRICHMENT_CONTROL_CHANNEL, "stop");
await this.publisher.publish(AUDIO_CONTROL_CHANNEL, "stop");
logger.debug("[Enrichment State] Stopping...");
// Transition to idle after a delay (workers will clean up)
setTimeout(async () => {
await this.updateState({ status: "idle", currentPhase: null });
logger.debug("[Enrichment State] Stopped and idle");
}, 5000);
return updated;
}
/**
* Clear enrichment state (set to idle)
*/
async clear(): Promise<void> {
await this.redis.del(ENRICHMENT_STATE_KEY);
logger.debug("[Enrichment State] Cleared");
}
/**
* Check if enrichment is currently running
*/
async isRunning(): Promise<boolean> {
const state = await this.getState();
return state?.status === "running";
}
/**
* Check if enrichment is paused
*/
async isPaused(): Promise<boolean> {
const state = await this.getState();
return state?.status === "paused";
}
/**
 * Check for hung processes: enrichment is "running" but there has been no
 * worker activity for longer than the given threshold.
 *
 * @param thresholdMinutes - inactivity window in minutes before the run is
 *   considered hung (default 15, matching the original hard-coded value)
 * @returns true when enrichment is running and stale; false otherwise
 */
async detectHang(thresholdMinutes = 15): Promise<boolean> {
  const state = await this.getState();
  if (!state || state.status !== "running") {
    return false;
  }
  // Guard against a missing or unparseable timestamp: new Date(...) would
  // yield NaN, and a NaN comparison silently returns false. Make that
  // outcome explicit instead of accidental.
  const lastActivityMs = new Date(state.lastActivity).getTime();
  if (Number.isNaN(lastActivityMs)) {
    return false;
  }
  const minutesSinceActivity = (Date.now() - lastActivityMs) / (1000 * 60);
  return minutesSinceActivity > thresholdMinutes;
}
/**
 * Cleanup connections.
 *
 * Closes both Redis connections concurrently. Using Promise.allSettled
 * ensures the publisher connection is still closed even when the main
 * client's quit() rejects — the original sequential awaits would leak the
 * publisher in that case. If any quit() failed, the first failure is
 * rethrown afterwards to preserve the original throwing contract.
 */
async disconnect(): Promise<void> {
  const results = await Promise.allSettled([
    this.redis.quit(),
    this.publisher.quit(),
  ]);
  const failure = results.find(
    (r): r is PromiseRejectedResult => r.status === "rejected"
  );
  if (failure) {
    throw failure.reason;
  }
}
}
// Singleton instance
export const enrichmentStateService = new EnrichmentStateService();

View File

@@ -1,4 +1,5 @@
import axios, { AxiosInstance } from "axios";
import { logger } from "../utils/logger";
import { redisClient } from "../utils/redis";
import { getSystemSettings } from "../utils/systemSettings";
@@ -38,7 +39,7 @@ class FanartService {
const settings = await getSystemSettings();
if (settings?.fanartEnabled && settings?.fanartApiKey) {
this.apiKey = settings.fanartApiKey;
console.log("Fanart.tv configured from database");
logger.debug("Fanart.tv configured from database");
this.initialized = true;
return;
}
@@ -49,7 +50,7 @@ class FanartService {
// Fallback to .env
if (process.env.FANART_API_KEY) {
this.apiKey = process.env.FANART_API_KEY;
console.log("Fanart.tv configured from .env");
logger.debug("Fanart.tv configured from .env");
}
// Note: Not logging "not configured" here - it's optional and logs are spammy
this.initialized = true;
@@ -73,7 +74,7 @@ class FanartService {
if (redisClient.isOpen) {
const cached = await redisClient.get(cacheKey);
if (cached) {
console.log(` Fanart.tv: Using cached image`);
logger.debug(` Fanart.tv: Using cached image`);
return cached;
}
}
@@ -82,7 +83,7 @@ class FanartService {
}
try {
console.log(` Fetching from Fanart.tv...`);
logger.debug(` Fetching from Fanart.tv...`);
const response = await this.client.get(`/music/${mbid}`, {
params: { api_key: this.apiKey },
});
@@ -98,39 +99,39 @@ class FanartService {
// If it's just a filename, construct the full URL
if (rawUrl && !rawUrl.startsWith("http")) {
rawUrl = `https://assets.fanart.tv/fanart/music/${mbid}/artistbackground/${rawUrl}`;
console.log(
logger.debug(
` Fanart.tv: Constructed full URL from filename`
);
}
imageUrl = rawUrl;
console.log(` Fanart.tv: Found artist background`);
logger.debug(` Fanart.tv: Found artist background`);
} else if (data.artistthumb && data.artistthumb.length > 0) {
let rawUrl = data.artistthumb[0].url;
// If it's just a filename, construct the full URL
if (rawUrl && !rawUrl.startsWith("http")) {
rawUrl = `https://assets.fanart.tv/fanart/music/${mbid}/artistthumb/${rawUrl}`;
console.log(
logger.debug(
` Fanart.tv: Constructed full URL from filename`
);
}
imageUrl = rawUrl;
console.log(` Fanart.tv: Found artist thumbnail`);
logger.debug(` Fanart.tv: Found artist thumbnail`);
} else if (data.hdmusiclogo && data.hdmusiclogo.length > 0) {
let rawUrl = data.hdmusiclogo[0].url;
// If it's just a filename, construct the full URL
if (rawUrl && !rawUrl.startsWith("http")) {
rawUrl = `https://assets.fanart.tv/fanart/music/${mbid}/hdmusiclogo/${rawUrl}`;
console.log(
logger.debug(
` Fanart.tv: Constructed full URL from filename`
);
}
imageUrl = rawUrl;
console.log(` Fanart.tv: Found HD logo`);
logger.debug(` Fanart.tv: Found HD logo`);
}
// Cache for 7 days
@@ -149,9 +150,9 @@ class FanartService {
return imageUrl;
} catch (error: any) {
if (error.response?.status === 404) {
console.log(`Fanart.tv: No images found`);
logger.debug(`Fanart.tv: No images found`);
} else {
console.error(` Fanart.tv error:`, error.message);
logger.error(` Fanart.tv error:`, error.message);
}
return null;
}

View File

@@ -1,4 +1,5 @@
import * as fs from "fs";
import { logger } from "../utils/logger";
import * as path from "path";
import { prisma } from "../utils/db";
import { config } from "../config";
@@ -26,7 +27,7 @@ export class FileValidatorService {
duration: 0,
};
console.log("[FileValidator] Starting library validation...");
logger.debug("[FileValidator] Starting library validation...");
// Get all tracks from the database
const tracks = await prisma.track.findMany({
@@ -37,7 +38,7 @@ export class FileValidatorService {
},
});
console.log(
logger.debug(
`[FileValidator] Found ${tracks.length} tracks to validate`
);
@@ -53,7 +54,7 @@ export class FileValidatorService {
// Prevent path traversal attacks
if (!absolutePath.startsWith(path.normalize(config.music.musicPath))) {
console.warn(
logger.warn(
`[FileValidator] Path traversal attempt detected: ${track.filePath}`
);
missingTrackIds.push(track.id);
@@ -64,7 +65,7 @@ export class FileValidatorService {
const exists = await this.fileExists(absolutePath);
if (!exists) {
console.log(
logger.debug(
`[FileValidator] Missing file: ${track.filePath} (${track.title})`
);
missingTrackIds.push(track.id);
@@ -74,12 +75,12 @@ export class FileValidatorService {
// Log progress every 100 tracks
if (result.tracksChecked % 100 === 0) {
console.log(
logger.debug(
`[FileValidator] Progress: ${result.tracksChecked}/${tracks.length} tracks checked, ${missingTrackIds.length} missing`
);
}
} catch (err: any) {
console.error(
logger.error(
`[FileValidator] Error checking ${track.filePath}:`,
err.message
);
@@ -93,7 +94,7 @@ export class FileValidatorService {
// Remove missing tracks from database
if (missingTrackIds.length > 0) {
console.log(
logger.debug(
`[FileValidator] Removing ${missingTrackIds.length} missing tracks from database...`
);
@@ -108,7 +109,7 @@ export class FileValidatorService {
result.duration = Date.now() - startTime;
console.log(
logger.debug(
`[FileValidator] Validation complete: ${result.tracksChecked} checked, ${result.tracksRemoved} removed (${result.duration}ms)`
);
@@ -150,7 +151,7 @@ export class FileValidatorService {
// Prevent path traversal attacks
if (!absolutePath.startsWith(path.normalize(config.music.musicPath))) {
console.warn(
logger.warn(
`[FileValidator] Path traversal attempt detected: ${track.filePath}`
);
return false;
@@ -159,7 +160,7 @@ export class FileValidatorService {
const exists = await this.fileExists(absolutePath);
if (!exists) {
console.log(
logger.debug(
`[FileValidator] Track file missing, removing from DB: ${track.title}`
);
await prisma.track.delete({

View File

@@ -8,6 +8,7 @@
* 4. Last.fm (fallback, often missing)
*/
import { logger } from "../utils/logger";
import axios from "axios";
export interface ImageSearchOptions {
@@ -36,7 +37,7 @@ export class ImageProviderService {
): Promise<ImageResult | null> {
const { timeout = 5000 } = options;
console.log(`[IMAGE] Searching for artist image: ${artistName}`);
logger.debug(`[IMAGE] Searching for artist image: ${artistName}`);
// Try Deezer first (most reliable)
try {
@@ -45,11 +46,11 @@ export class ImageProviderService {
timeout
);
if (deezerImage) {
console.log(` Found image from Deezer`);
logger.debug(` Found image from Deezer`);
return deezerImage;
}
} catch (error) {
console.log(
logger.debug(
` Deezer failed: ${
error instanceof Error ? error.message : "Unknown error"
}`
@@ -64,11 +65,11 @@ export class ImageProviderService {
timeout
);
if (fanartImage) {
console.log(` Found image from Fanart.tv`);
logger.debug(` Found image from Fanart.tv`);
return fanartImage;
}
} catch (error) {
console.log(
logger.debug(
`Fanart.tv failed: ${
error instanceof Error ? error.message : "Unknown error"
}`
@@ -84,11 +85,11 @@ export class ImageProviderService {
timeout
);
if (mbImage) {
console.log(` Found image from MusicBrainz`);
logger.debug(` Found image from MusicBrainz`);
return mbImage;
}
} catch (error) {
console.log(
logger.debug(
`MusicBrainz failed: ${
error instanceof Error ? error.message : "Unknown error"
}`
@@ -96,7 +97,7 @@ export class ImageProviderService {
}
}
console.log(` No artist image found from any source`);
logger.debug(` No artist image found from any source`);
return null;
}
@@ -111,7 +112,7 @@ export class ImageProviderService {
): Promise<ImageResult | null> {
const { timeout = 5000 } = options;
console.log(
logger.debug(
`[IMAGE] Searching for album cover: ${artistName} - ${albumTitle}`
);
@@ -123,11 +124,11 @@ export class ImageProviderService {
timeout
);
if (deezerCover) {
console.log(` Found cover from Deezer`);
logger.debug(` Found cover from Deezer`);
return deezerCover;
}
} catch (error) {
console.log(
logger.debug(
` Deezer failed: ${
error instanceof Error ? error.message : "Unknown error"
}`
@@ -142,11 +143,11 @@ export class ImageProviderService {
timeout
);
if (mbCover) {
console.log(` Found cover from MusicBrainz`);
logger.debug(` Found cover from MusicBrainz`);
return mbCover;
}
} catch (error) {
console.log(
logger.debug(
`MusicBrainz failed: ${
error instanceof Error ? error.message : "Unknown error"
}`
@@ -162,11 +163,11 @@ export class ImageProviderService {
timeout
);
if (fanartCover) {
console.log(` Found cover from Fanart.tv`);
logger.debug(` Found cover from Fanart.tv`);
return fanartCover;
}
} catch (error) {
console.log(
logger.debug(
`Fanart.tv failed: ${
error instanceof Error ? error.message : "Unknown error"
}`
@@ -174,7 +175,7 @@ export class ImageProviderService {
}
}
console.log(` No album cover found from any source`);
logger.debug(` No album cover found from any source`);
return null;
}
@@ -407,7 +408,7 @@ export class ImageProviderService {
}
}
} catch (error) {
console.log(
logger.debug(
`Last.fm failed: ${
error instanceof Error ? error.message : "Unknown error"
}`

View File

@@ -1,4 +1,5 @@
import axios, { AxiosInstance } from "axios";
import { logger } from "../utils/logger";
import { redisClient } from "../utils/redis";
interface ItunesPodcast {
@@ -51,7 +52,7 @@ class ItunesService {
return JSON.parse(cached);
}
} catch (err) {
console.warn("Redis get error:", err);
logger.warn("Redis get error:", err);
}
await this.rateLimit();
@@ -60,7 +61,7 @@ class ItunesService {
try {
await redisClient.setEx(cacheKey, ttlSeconds, JSON.stringify(data));
} catch (err) {
console.warn("Redis set error:", err);
logger.warn("Redis set error:", err);
}
return data;
@@ -234,13 +235,13 @@ class ItunesService {
const keywords = this.extractSearchKeywords(title, description, author);
if (keywords.length === 0) {
console.log(
logger.debug(
"No keywords extracted for similar podcast search, falling back to title"
);
return this.searchPodcasts(title, limit);
}
console.log(
logger.debug(
` Searching for similar podcasts using keywords: ${keywords.join(", ")}`
);
@@ -275,31 +276,31 @@ class ItunesService {
genreId: number,
limit = 20
): Promise<ItunesPodcast[]> {
console.log(`[iTunes SERVICE] getTopPodcastsByGenre called with genre=${genreId}, limit=${limit}`);
logger.debug(`[iTunes SERVICE] getTopPodcastsByGenre called with genre=${genreId}, limit=${limit}`);
const cacheKey = `itunes:genre:${genreId}:${limit}`;
console.log(`[iTunes SERVICE] Cache key: ${cacheKey}`);
logger.debug(`[iTunes SERVICE] Cache key: ${cacheKey}`);
const result = await this.cachedRequest(
cacheKey,
async () => {
try {
console.log(`[iTunes] Fetching genre ${genreId} from RSS feed...`);
logger.debug(`[iTunes] Fetching genre ${genreId} from RSS feed...`);
// Use iTunes RSS feed for top podcasts by genre
const response = await this.client.get(
`/us/rss/toppodcasts/genre=${genreId}/limit=${limit}/json`
);
console.log(`[iTunes] Response status: ${response.status}`);
console.log(`[iTunes] Has feed data: ${!!response.data?.feed}`);
console.log(`[iTunes] Entries count: ${response.data?.feed?.entry?.length || 0}`);
logger.debug(`[iTunes] Response status: ${response.status}`);
logger.debug(`[iTunes] Has feed data: ${!!response.data?.feed}`);
logger.debug(`[iTunes] Entries count: ${response.data?.feed?.entry?.length || 0}`);
const entries = response.data?.feed?.entry || [];
// If only one entry, it might not be an array
const entriesArray = Array.isArray(entries) ? entries : [entries];
console.log(`[iTunes] Processing ${entriesArray.length} entries`);
logger.debug(`[iTunes] Processing ${entriesArray.length} entries`);
// Convert RSS feed format to our podcast format
const podcasts = entriesArray.map((entry: any) => {
@@ -315,21 +316,21 @@ class ItunesService {
primaryGenreName: entry.category?.attributes?.label,
collectionViewUrl: entry.link?.attributes?.href,
};
console.log(`[iTunes] Mapped podcast: ${podcast.collectionName} (ID: ${podcast.collectionId})`);
logger.debug(`[iTunes] Mapped podcast: ${podcast.collectionName} (ID: ${podcast.collectionId})`);
return podcast;
}).filter((p: any) => p.collectionId > 0); // Filter out invalid entries
console.log(`[iTunes] Returning ${podcasts.length} valid podcasts`);
logger.debug(`[iTunes] Returning ${podcasts.length} valid podcasts`);
return podcasts;
} catch (error) {
console.error(`[iTunes] ERROR in requestFn:`, error);
logger.error(`[iTunes] ERROR in requestFn:`, error);
return [];
}
},
2592000 // 30 days
);
console.log(`[iTunes SERVICE] cachedRequest returned ${result.length} podcasts`);
logger.debug(`[iTunes SERVICE] cachedRequest returned ${result.length} podcasts`);
return result;
}
}

View File

@@ -1,4 +1,5 @@
import axios, { AxiosInstance } from "axios";
import { logger } from "../utils/logger";
import * as fuzz from "fuzzball";
import { config } from "../config";
import { redisClient } from "../utils/redis";
@@ -6,6 +7,7 @@ import { getSystemSettings } from "../utils/systemSettings";
import { fanartService } from "./fanart";
import { deezerService } from "./deezer";
import { rateLimiter } from "./rateLimiter";
import { normalizeToArray } from "../utils/normalize";
interface SimilarArtist {
name: string;
@@ -39,24 +41,34 @@ class LastFmService {
const settings = await getSystemSettings();
if (settings?.lastfmApiKey) {
this.apiKey = settings.lastfmApiKey;
console.log("Last.fm configured from user settings");
logger.debug("Last.fm configured from user settings");
} else if (this.apiKey) {
console.log("Last.fm configured (default app key)");
logger.debug("Last.fm configured (default app key)");
}
} catch (err) {
// DB not ready yet, use default/env key
if (this.apiKey) {
console.log("Last.fm configured (default app key)");
logger.debug("Last.fm configured (default app key)");
}
}
if (!this.apiKey) {
console.warn("Last.fm API key not available");
logger.warn("Last.fm API key not available");
}
this.initialized = true;
}
/**
* Refresh the API key from current settings
* Called when system settings are updated to pick up new key
*/
async refreshApiKey(): Promise<void> {
this.initialized = false;
await this.ensureInitialized();
logger.debug("Last.fm API key refreshed from settings");
}
private async request<T = any>(params: Record<string, any>) {
await this.ensureInitialized();
const response = await rateLimiter.execute("lastfm", () =>
@@ -78,7 +90,7 @@ class LastFmService {
return JSON.parse(cached);
}
} catch (err) {
console.warn("Redis get error:", err);
logger.warn("Redis get error:", err);
}
try {
@@ -107,7 +119,7 @@ class LastFmService {
JSON.stringify(results)
);
} catch (err) {
console.warn("Redis set error:", err);
logger.warn("Redis set error:", err);
}
return results;
@@ -117,13 +129,13 @@ class LastFmService {
error.response?.status === 404 ||
error.response?.data?.error === 6
) {
console.log(
logger.debug(
`Artist MBID not found on Last.fm, trying name search: ${artistName}`
);
return this.getSimilarArtistsByName(artistName, limit);
}
console.error(`Last.fm error for ${artistName}:`, error);
logger.error(`Last.fm error for ${artistName}:`, error);
return [];
}
}
@@ -140,7 +152,7 @@ class LastFmService {
return JSON.parse(cached);
}
} catch (err) {
console.warn("Redis get error:", err);
logger.warn("Redis get error:", err);
}
try {
@@ -169,12 +181,12 @@ class LastFmService {
JSON.stringify(results)
);
} catch (err) {
console.warn("Redis set error:", err);
logger.warn("Redis set error:", err);
}
return results;
} catch (error) {
console.error(`Last.fm error for ${artistName}:`, error);
logger.error(`Last.fm error for ${artistName}:`, error);
return [];
}
}
@@ -188,7 +200,7 @@ class LastFmService {
return JSON.parse(cached);
}
} catch (err) {
console.warn("Redis get error:", err);
logger.warn("Redis get error:", err);
}
try {
@@ -202,20 +214,38 @@ class LastFmService {
const album = data.album;
// Cache for 30 days
try {
await redisClient.setEx(
cacheKey,
2592000,
JSON.stringify(album)
);
} catch (err) {
console.warn("Redis set error:", err);
// Normalize arrays before caching/returning
if (album) {
const normalized = {
...album,
image: normalizeToArray(album.image),
tags: album.tags ? {
...album.tags,
tag: normalizeToArray(album.tags.tag)
} : album.tags,
tracks: album.tracks ? {
...album.tracks,
track: normalizeToArray(album.tracks.track)
} : album.tracks
};
// Cache for 30 days
try {
await redisClient.setEx(
cacheKey,
2592000,
JSON.stringify(normalized)
);
} catch (err) {
logger.warn("Redis set error:", err);
}
return normalized;
}
return album;
} catch (error) {
console.error(`Last.fm album info error for ${albumName}:`, error);
logger.error(`Last.fm album info error for ${albumName}:`, error);
return null;
}
}
@@ -229,7 +259,7 @@ class LastFmService {
return JSON.parse(cached);
}
} catch (err) {
console.warn("Redis get error:", err);
logger.warn("Redis get error:", err);
}
try {
@@ -251,12 +281,12 @@ class LastFmService {
JSON.stringify(albums)
);
} catch (err) {
console.warn("Redis set error:", err);
logger.warn("Redis set error:", err);
}
return albums;
} catch (error) {
console.error(`Last.fm tag albums error for ${tag}:`, error);
logger.error(`Last.fm tag albums error for ${tag}:`, error);
return [];
}
}
@@ -270,7 +300,7 @@ class LastFmService {
return JSON.parse(cached);
}
} catch (err) {
console.warn("Redis get error:", err);
logger.warn("Redis get error:", err);
}
try {
@@ -293,12 +323,12 @@ class LastFmService {
JSON.stringify(tracks)
);
} catch (err) {
console.warn("Redis set error:", err);
logger.warn("Redis set error:", err);
}
return tracks;
} catch (error) {
console.error(
logger.error(
`Last.fm similar tracks error for ${trackName}:`,
error
);
@@ -319,7 +349,7 @@ class LastFmService {
return JSON.parse(cached);
}
} catch (err) {
console.warn("Redis get error:", err);
logger.warn("Redis get error:", err);
}
try {
@@ -348,12 +378,12 @@ class LastFmService {
JSON.stringify(tracks)
);
} catch (err) {
console.warn("Redis set error:", err);
logger.warn("Redis set error:", err);
}
return tracks;
} catch (error) {
console.error(`Last.fm top tracks error for ${artistName}:`, error);
logger.error(`Last.fm top tracks error for ${artistName}:`, error);
return [];
}
}
@@ -371,7 +401,7 @@ class LastFmService {
return JSON.parse(cached);
}
} catch (err) {
console.warn("Redis get error:", err);
logger.warn("Redis get error:", err);
}
try {
@@ -400,12 +430,12 @@ class LastFmService {
JSON.stringify(albums)
);
} catch (err) {
console.warn("Redis set error:", err);
logger.warn("Redis set error:", err);
}
return albums;
} catch (error) {
console.error(`Last.fm top albums error for ${artistName}:`, error);
logger.error(`Last.fm top albums error for ${artistName}:`, error);
return [];
}
}
@@ -428,9 +458,27 @@ class LastFmService {
}
const data = await this.request(params);
return data.artist;
const artist = data.artist;
// Normalize arrays before returning
if (artist) {
return {
...artist,
image: normalizeToArray(artist.image),
tags: artist.tags ? {
...artist.tags,
tag: normalizeToArray(artist.tags.tag)
} : artist.tags,
similar: artist.similar ? {
...artist.similar,
artist: normalizeToArray(artist.similar.artist)
} : artist.similar
};
}
return artist;
} catch (error) {
console.error(
logger.error(
`Last.fm artist info error for ${artistName}:`,
error
);
@@ -538,7 +586,7 @@ class LastFmService {
name: artist.name,
listeners: parseInt(artist.listeners || "0", 10),
url: artist.url,
image: this.getBestImage(artist.image),
image: this.getBestImage(normalizeToArray(artist.image)),
mbid: artist.mbid,
bio: null,
tags: [] as string[],
@@ -587,7 +635,7 @@ class LastFmService {
album: track.album || null,
listeners: parseInt(track.listeners || "0", 10),
url: track.url,
image: this.getBestImage(track.image),
image: this.getBestImage(normalizeToArray(track.image)),
mbid: track.mbid,
};
@@ -633,7 +681,7 @@ class LastFmService {
const artists = data.results?.artistmatches?.artist || [];
console.log(
logger.debug(
`\n [LAST.FM SEARCH] Found ${artists.length} artists (before filtering)`
);
@@ -675,11 +723,11 @@ class LastFmService {
wordMatches,
};
})
.filter(({ similarity, wordMatches }) => {
.filter(({ similarity, wordMatches }: { similarity: number; wordMatches: number }) => {
if (!queryLower) return true;
return similarity >= 50 || wordMatches >= minWordMatches;
})
.sort((a, b) => {
.sort((a: any, b: any) => {
return (
Number(b.hasMbid) - Number(a.hasMbid) ||
b.wordMatches - a.wordMatches ||
@@ -728,7 +776,7 @@ class LastFmService {
uniqueArtists.push(candidate);
}
} catch (error) {
console.warn(
logger.warn(
"[LAST.FM SEARCH] Similar artist fallback failed:",
error
);
@@ -737,7 +785,7 @@ class LastFmService {
const limitedArtists = uniqueArtists.slice(0, limit);
console.log(
logger.debug(
` → Filtered to ${limitedArtists.length} relevant matches (limit: ${limit})`
);
@@ -761,7 +809,7 @@ class LastFmService {
return [...enriched, ...fast].filter(Boolean);
} catch (error) {
console.error("Last.fm artist search error:", error);
logger.error("Last.fm artist search error:", error);
return [];
}
}
@@ -781,7 +829,7 @@ class LastFmService {
const tracks = data.results?.trackmatches?.track || [];
console.log(
logger.debug(
`\n [LAST.FM TRACK SEARCH] Found ${tracks.length} tracks`
);
@@ -811,7 +859,7 @@ class LastFmService {
return [...enriched, ...fast].filter(Boolean);
} catch (error) {
console.error("Last.fm track search error:", error);
logger.error("Last.fm track search error:", error);
return [];
}
}
@@ -829,13 +877,96 @@ class LastFmService {
format: "json",
});
return data.track;
const track = data.track;
// Normalize arrays before returning
if (track) {
return {
...track,
toptags: track.toptags ? {
...track.toptags,
tag: normalizeToArray(track.toptags.tag)
} : track.toptags,
album: track.album ? {
...track.album,
image: normalizeToArray(track.album.image)
} : track.album
};
}
return track;
} catch (error) {
// Don't log errors for track info (many tracks don't have full info)
return null;
}
}
/**
* Get the canonical artist name from Last.fm correction API
* Resolves aliases and misspellings to official artist names
*
* @param artistName - The artist name to check for corrections
* @returns The canonical artist name info, or null if no correction found
*
* @example
* getArtistCorrection("of mice") // Returns { corrected: true, canonicalName: "Of Mice & Men", mbid: "..." }
* getArtistCorrection("bjork") // Returns { corrected: true, canonicalName: "Björk", mbid: "..." }
*/
async getArtistCorrection(artistName: string): Promise<{
corrected: boolean;
canonicalName: string;
mbid?: string;
} | null> {
const cacheKey = `lastfm:correction:${artistName.toLowerCase().trim()}`;
// Check cache first (30-day TTL)
try {
const cached = await redisClient.get(cacheKey);
if (cached) {
return cached === "null" ? null : JSON.parse(cached);
}
} catch (err) {
logger.warn("Redis get error:", err);
}
try {
const data = await this.request({
method: "artist.getCorrection",
artist: artistName,
api_key: this.apiKey,
format: "json",
});
const correction = data.corrections?.correction?.artist;
if (!correction || !correction.name) {
// Cache null result
await redisClient.setEx(cacheKey, 2592000, "null");
return null;
}
const result = {
corrected:
correction.name.toLowerCase() !== artistName.toLowerCase(),
canonicalName: correction.name,
mbid: correction.mbid || undefined,
};
// Cache for 30 days
await redisClient.setEx(cacheKey, 2592000, JSON.stringify(result));
return result;
} catch (error: any) {
// Error 6 = "Artist not found" - cache negative result
if (error.response?.data?.error === 6) {
await redisClient.setEx(cacheKey, 2592000, "null");
return null;
}
logger.error(`Last.fm correction error for ${artistName}:`, error);
return null;
}
}
/**
* Get popular artists from Last.fm charts
*/
@@ -844,7 +975,7 @@ class LastFmService {
// Return empty if no API key configured
if (!this.apiKey) {
console.warn(
logger.warn(
"Last.fm: Cannot fetch chart artists - no API key configured"
);
return [];
@@ -858,7 +989,7 @@ class LastFmService {
return JSON.parse(cached);
}
} catch (err) {
console.warn("Redis get error:", err);
logger.warn("Redis get error:", err);
}
try {
@@ -901,7 +1032,7 @@ class LastFmService {
// Last fallback to Last.fm images (but filter placeholders)
if (!image) {
const lastFmImage = this.getBestImage(artist.image);
const lastFmImage = this.getBestImage(normalizeToArray(artist.image));
if (
lastFmImage &&
!lastFmImage.includes(
@@ -933,12 +1064,12 @@ class LastFmService {
JSON.stringify(detailedArtists)
);
} catch (err) {
console.warn("Redis set error:", err);
logger.warn("Redis set error:", err);
}
return detailedArtists;
} catch (error) {
console.error("Last.fm chart artists error:", error);
logger.error("Last.fm chart artists error:", error);
return [];
}
}

File diff suppressed because it is too large Load Diff

View File

@@ -6,6 +6,7 @@
* instant mood mix generation through simple database lookups.
*/
import { logger } from "../utils/logger";
import { prisma } from "../utils/db";
// Mood configuration with scoring rules
@@ -16,6 +17,7 @@ export const MOOD_CONFIG = {
name: "Happy & Upbeat",
color: "from-yellow-400 to-orange-500",
icon: "Smile",
moodTagKeywords: ["happy", "upbeat", "cheerful", "joyful", "positive"],
// Primary: ML mood prediction
primary: { moodHappy: { min: 0.5 }, moodSad: { max: 0.4 } },
// Fallback: basic audio features
@@ -25,6 +27,7 @@ export const MOOD_CONFIG = {
name: "Melancholic",
color: "from-blue-600 to-indigo-700",
icon: "CloudRain",
moodTagKeywords: ["sad", "melancholic", "melancholy", "dark", "somber"],
primary: { moodSad: { min: 0.5 }, moodHappy: { max: 0.4 } },
fallback: { valence: { max: 0.35 }, keyScale: "minor" },
},
@@ -32,6 +35,7 @@ export const MOOD_CONFIG = {
name: "Chill & Relaxed",
color: "from-teal-400 to-cyan-500",
icon: "Wind",
moodTagKeywords: ["relaxed", "chill", "calm", "mellow"],
primary: { moodRelaxed: { min: 0.5 }, moodAggressive: { max: 0.3 } },
fallback: { energy: { max: 0.5 }, arousal: { max: 0.5 } },
},
@@ -39,6 +43,7 @@ export const MOOD_CONFIG = {
name: "High Energy",
color: "from-red-500 to-orange-600",
icon: "Zap",
moodTagKeywords: ["energetic", "powerful", "exciting"],
primary: { arousal: { min: 0.6 }, energy: { min: 0.7 } },
fallback: { bpm: { min: 120 }, energy: { min: 0.7 } },
},
@@ -46,6 +51,7 @@ export const MOOD_CONFIG = {
name: "Dance Party",
color: "from-pink-500 to-rose-600",
icon: "PartyPopper",
moodTagKeywords: ["party", "danceable", "groovy"],
primary: { moodParty: { min: 0.5 }, danceability: { min: 0.6 } },
fallback: { danceability: { min: 0.7 }, energy: { min: 0.6 } },
},
@@ -53,6 +59,7 @@ export const MOOD_CONFIG = {
name: "Focus Mode",
color: "from-purple-600 to-violet-700",
icon: "Brain",
moodTagKeywords: ["instrumental"],
primary: { instrumentalness: { min: 0.5 }, moodRelaxed: { min: 0.3 } },
fallback: {
instrumentalness: { min: 0.5 },
@@ -63,6 +70,7 @@ export const MOOD_CONFIG = {
name: "Deep Feels",
color: "from-gray-700 to-slate-800",
icon: "Moon",
moodTagKeywords: ["sad", "melancholic", "emotional", "dark"],
primary: { moodSad: { min: 0.4 }, valence: { max: 0.4 } },
fallback: { valence: { max: 0.35 }, keyScale: "minor" },
},
@@ -70,6 +78,7 @@ export const MOOD_CONFIG = {
name: "Intense",
color: "from-red-700 to-gray-900",
icon: "Flame",
moodTagKeywords: ["aggressive", "angry"],
primary: { moodAggressive: { min: 0.5 } },
fallback: { energy: { min: 0.8 }, arousal: { min: 0.7 } },
},
@@ -77,6 +86,7 @@ export const MOOD_CONFIG = {
name: "Acoustic Vibes",
color: "from-amber-500 to-yellow-600",
icon: "Guitar",
moodTagKeywords: ["acoustic"],
primary: { moodAcoustic: { min: 0.5 }, moodElectronic: { max: 0.4 } },
fallback: {
acousticness: { min: 0.6 },
@@ -123,6 +133,7 @@ interface TrackWithAnalysis {
instrumentalness: number | null;
bpm: number | null;
keyScale: string | null;
moodTags: string[];
}
export class MoodBucketService {
@@ -153,11 +164,12 @@ export class MoodBucketService {
instrumentalness: true,
bpm: true,
keyScale: true,
moodTags: true,
},
});
if (!track || track.analysisStatus !== "completed") {
console.log(
logger.debug(
`[MoodBucket] Track ${trackId} not analyzed yet, skipping`
);
return [];
@@ -199,7 +211,7 @@ export class MoodBucketService {
.filter(([_, score]) => score > 0)
.map(([mood]) => mood);
console.log(
logger.debug(
`[MoodBucket] Track ${trackId} assigned to moods: ${
assignedMoods.join(", ") || "none"
}`
@@ -226,6 +238,16 @@ export class MoodBucketService {
acoustic: 0,
};
// Check if we have individual mood fields OR moodTags
const hasIndividualMoods = track.moodHappy !== null || track.moodSad !== null;
const hasMoodTags = track.moodTags && track.moodTags.length > 0;
// If we have moodTags but no individual mood fields, parse moodTags
if (!hasIndividualMoods && hasMoodTags) {
return this.calculateMoodScoresFromTags(track.moodTags);
}
// Otherwise use original logic
for (const [mood, config] of Object.entries(MOOD_CONFIG)) {
const rules = isEnhanced ? config.primary : config.fallback;
const score = this.evaluateMoodRules(track, rules);
@@ -235,6 +257,43 @@ export class MoodBucketService {
return scores;
}
/**
* Calculate mood scores from moodTags array
* Used when individual mood fields are not populated
*/
private calculateMoodScoresFromTags(moodTags: string[]): Record<MoodType, number> {
const scores: Record<MoodType, number> = {
happy: 0,
sad: 0,
chill: 0,
energetic: 0,
party: 0,
focus: 0,
melancholy: 0,
aggressive: 0,
acoustic: 0,
};
const normalizedTags = moodTags.map(tag => tag.toLowerCase());
for (const [mood, config] of Object.entries(MOOD_CONFIG)) {
const keywords = config.moodTagKeywords;
let matchCount = 0;
for (const keyword of keywords) {
if (normalizedTags.includes(keyword)) {
matchCount++;
}
}
if (matchCount > 0) {
scores[mood as MoodType] = Math.min(1.0, 0.3 + (matchCount - 1) * 0.2);
}
}
return scores;
}
/**
* Evaluate mood rules against track features
* Returns a score 0-1 based on how well the track matches the rules
@@ -380,7 +439,7 @@ export class MoodBucketService {
});
if (moodBuckets.length < 8) {
console.log(
logger.debug(
`[MoodBucket] Not enough tracks for mood ${mood}: ${moodBuckets.length}`
);
return null;
@@ -465,7 +524,7 @@ export class MoodBucketService {
},
});
console.log(
logger.debug(
`[MoodBucket] Saved ${mood} mix for user ${userId} (${mix.trackCount} tracks)`
);
@@ -532,7 +591,7 @@ export class MoodBucketService {
let assigned = 0;
let skip = 0;
console.log("[MoodBucket] Starting backfill of all analyzed tracks...");
logger.debug("[MoodBucket] Starting backfill of all analyzed tracks...");
while (true) {
const tracks = await prisma.track.findMany({
@@ -555,6 +614,7 @@ export class MoodBucketService {
instrumentalness: true,
bpm: true,
keyScale: true,
moodTags: true,
},
skip,
take: batchSize,
@@ -601,12 +661,12 @@ export class MoodBucketService {
}
skip += batchSize;
console.log(
logger.debug(
`[MoodBucket] Backfill progress: ${processed} tracks processed, ${assigned} mood assignments`
);
}
console.log(
logger.debug(
`[MoodBucket] Backfill complete: ${processed} tracks processed, ${assigned} mood assignments`
);
return { processed, assigned };

View File

@@ -1,11 +1,18 @@
import * as fs from "fs";
import { logger } from "../utils/logger";
import * as path from "path";
import { parseFile } from "music-metadata";
import { prisma } from "../utils/db";
import PQueue from "p-queue";
import { CoverArtExtractor } from "./coverArtExtractor";
import { deezerService } from "./deezer";
import { normalizeArtistName, areArtistNamesSimilar, canonicalizeVariousArtists } from "../utils/artistNormalization";
import {
normalizeArtistName,
areArtistNamesSimilar,
canonicalizeVariousArtists,
extractPrimaryArtist,
parseArtistFromPath,
} from "../utils/artistNormalization";
// Supported audio formats
const AUDIO_EXTENSIONS = new Set([
@@ -64,11 +71,11 @@ export class MusicScannerService {
duration: 0,
};
console.log(`Starting library scan: ${musicPath}`);
logger.debug(`Starting library scan: ${musicPath}`);
// Step 1: Find all audio files
const audioFiles = await this.findAudioFiles(musicPath);
console.log(`Found ${audioFiles.length} audio files`);
logger.debug(`Found ${audioFiles.length} audio files`);
// Step 2: Get existing tracks from database
const existingTracks = await prisma.track.findMany({
@@ -135,7 +142,7 @@ export class MusicScannerService {
};
result.errors.push(error);
progress.errors.push(error);
console.error(`Error processing ${audioFile}:`, err);
logger.error(`Error processing ${audioFile}:`, err);
} finally {
filesScanned++;
progress.filesScanned = filesScanned;
@@ -161,7 +168,7 @@ export class MusicScannerService {
},
});
result.tracksRemoved = tracksToRemove.length;
console.log(`Removed ${tracksToRemove.length} missing tracks`);
logger.debug(`Removed ${tracksToRemove.length} missing tracks`);
}
// Step 5: Clean up orphaned albums (albums with no tracks)
@@ -173,7 +180,7 @@ export class MusicScannerService {
});
if (orphanedAlbums.length > 0) {
console.log(`Removing ${orphanedAlbums.length} orphaned albums...`);
logger.debug(`Removing ${orphanedAlbums.length} orphaned albums...`);
await prisma.album.deleteMany({
where: {
id: { in: orphanedAlbums.map((a) => a.id) },
@@ -190,7 +197,13 @@ export class MusicScannerService {
});
if (orphanedArtists.length > 0) {
console.log(`Removing ${orphanedArtists.length} orphaned artists: ${orphanedArtists.map(a => a.name).join(', ')}`);
logger.debug(
`Removing ${
orphanedArtists.length
} orphaned artists: ${orphanedArtists
.map((a) => a.name)
.join(", ")}`
);
await prisma.artist.deleteMany({
where: {
id: { in: orphanedArtists.map((a) => a.id) },
@@ -199,79 +212,13 @@ export class MusicScannerService {
}
result.duration = Date.now() - startTime;
console.log(
logger.debug(
`Scan complete: +${result.tracksAdded} ~${result.tracksUpdated} -${result.tracksRemoved} (${result.duration}ms)`
);
return result;
}
/**
 * Extract the primary artist from collaboration strings.
 *
 * Examples:
 *   "CHVRCHES & Robert Smith" -> "CHVRCHES"
 *   "Artist feat. Someone"    -> "Artist"
 *   "Artist ft. Someone"      -> "Artist"
 *   "Artist, Someone"         -> "Artist"
 *
 * List-style band names such as "Earth, Wind & Fire" or
 * "Crosby, Stills, Nash & Young" are preserved as-is.
 *
 * NOTE(review): this is a heuristic. A band name joined by a single
 * " and "/" & " with no comma (e.g. "The Naked and Famous") can still
 * be split; resolving those reliably would need a metadata lookup.
 */
private extractPrimaryArtist(artistName: string): string {
  // Trim whitespace
  artistName = artistName.trim();

  // HIGH PRIORITY: these markers almost always indicate a featured
  // guest (never part of a band name), so we always split on them.
  const definiteCollaborationPatterns = [
    / feat\.? /i, // "feat." or "feat "
    / ft\.? /i, // "ft." or "ft "
    / featuring /i,
  ];

  for (const pattern of definiteCollaborationPatterns) {
    const parts = artistName.split(pattern);
    if (parts.length > 1) {
      return parts[0].trim();
    }
  }

  // FIX: "A, B & C" / "A, B and C" is the classic list-style BAND name
  // ("Earth, Wind & Fire"). The ambiguous patterns below would truncate
  // it to "Earth, Wind" (the " & " split) or "Earth" (the ", " split),
  // contradicting the documented behavior — so detect the shape up
  // front and keep the name intact.
  if (/, .*( & | and )/i.test(artistName)) {
    return artistName;
  }

  // LOWER PRIORITY: these might be part of a band name, so only split
  // when the left-hand side looks like a complete artist name.
  const ambiguousPatterns = [
    { pattern: / \& /, name: "&" }, // "Earth, Wind & Fire" shouldn't split
    { pattern: / and /i, name: "and" }, // "The Naked and Famous" shouldn't split
    { pattern: / with /i, name: "with" },
    { pattern: /, /, name: "," },
  ];

  for (const { pattern } of ambiguousPatterns) {
    const parts = artistName.split(pattern);
    if (parts.length > 1) {
      const firstPart = parts[0].trim();
      const lastWord = firstPart.split(/\s+/).pop()?.toLowerCase() || "";

      // A trailing article/conjunction suggests the split landed in the
      // middle of a band name — try the next pattern instead.
      const incompleteEndings = ["the", "a", "an", "and", "of", ","];
      if (incompleteEndings.includes(lastWord)) {
        continue; // Skip this pattern, try the next one
      }

      // Very short left-hand sides are likely truncations, not names.
      if (firstPart.length < 4) {
        continue;
      }

      return firstPart;
    }
  }

  // No collaboration marker found — return as-is.
  return artistName;
}
/**
* Check if a file path is within the discovery folder
* Discovery albums are stored in paths like "discovery/Artist/Album/track.flac"
@@ -294,12 +241,13 @@ export class MusicScannerService {
return str
.toLowerCase()
.trim()
.normalize('NFD').replace(/[\u0300-\u036f]/g, '') // Remove diacritics (café → cafe)
.replace(/[''´`]/g, "'") // Normalize apostrophes
.replace(/[""„]/g, '"') // Normalize quotes
.replace(/[–—−]/g, '-') // Normalize dashes
.replace(/\s+/g, ' ') // Collapse whitespace
.replace(/[^\w\s'"-]/g, ''); // Remove other special chars
.normalize("NFD")
.replace(/[\u0300-\u036f]/g, "") // Remove diacritics (café → cafe)
.replace(/[''´`]/g, "'") // Normalize apostrophes
.replace(/[""„]/g, '"') // Normalize quotes
.replace(/[–—−]/g, "-") // Normalize dashes
.replace(/\s+/g, " ") // Collapse whitespace
.replace(/[^\w\s'"-]/g, ""); // Remove other special chars
}
/**
@@ -314,16 +262,23 @@ export class MusicScannerService {
const normalizedArtist = this.normalizeForMatching(artistName);
const normalizedAlbum = this.normalizeForMatching(albumTitle);
// Also try with primary artist extracted (handles "Artist A feat. Artist B")
const primaryArtist = this.extractPrimaryArtist(artistName);
const normalizedPrimaryArtist = this.normalizeForMatching(primaryArtist);
console.log(`[Scanner] Checking discovery: "${artistName}" → "${normalizedArtist}"`);
// Also try with primary artist extracted (handles "Artist A feat. Artist B")
const primaryArtist = extractPrimaryArtist(artistName);
const normalizedPrimaryArtist =
this.normalizeForMatching(primaryArtist);
logger.debug(
`[Scanner] Checking discovery: "${artistName}" -> "${normalizedArtist}"`
);
if (primaryArtist !== artistName) {
console.log(`[Scanner] Primary artist: "${primaryArtist}" → "${normalizedPrimaryArtist}"`);
logger.debug(
`[Scanner] Primary artist: "${primaryArtist}" -> "${normalizedPrimaryArtist}"`
);
}
console.log(`[Scanner] Album: "${albumTitle}" → "${normalizedAlbum}"`);
logger.debug(
`[Scanner] Album: "${albumTitle}" -> "${normalizedAlbum}"`
);
try {
// Get all discovery jobs (pending, processing, or recently completed)
@@ -334,16 +289,26 @@ export class MusicScannerService {
},
});
console.log(`[Scanner] Found ${discoveryJobs.length} discovery jobs to check`);
logger.debug(
`[Scanner] Found ${discoveryJobs.length} discovery jobs to check`
);
// Pass 1: Exact match after normalization
for (const job of discoveryJobs) {
const metadata = job.metadata as any;
const jobArtist = this.normalizeForMatching(metadata?.artistName || "");
const jobAlbum = this.normalizeForMatching(metadata?.albumTitle || "");
const jobArtist = this.normalizeForMatching(
metadata?.artistName || ""
);
const jobAlbum = this.normalizeForMatching(
metadata?.albumTitle || ""
);
if ((jobArtist === normalizedArtist || jobArtist === normalizedPrimaryArtist) && jobAlbum === normalizedAlbum) {
console.log(`[Scanner] EXACT MATCH: job ${job.id}`);
if (
(jobArtist === normalizedArtist ||
jobArtist === normalizedPrimaryArtist) &&
jobAlbum === normalizedAlbum
) {
logger.debug(`[Scanner] EXACT MATCH: job ${job.id}`);
return true;
}
}
@@ -351,23 +316,31 @@ export class MusicScannerService {
// Pass 2: Partial match fallback (handles "Album" vs "Album (Deluxe)")
for (const job of discoveryJobs) {
const metadata = job.metadata as any;
const jobArtist = this.normalizeForMatching(metadata?.artistName || "");
const jobAlbum = this.normalizeForMatching(metadata?.albumTitle || "");
const jobArtist = this.normalizeForMatching(
metadata?.artistName || ""
);
const jobAlbum = this.normalizeForMatching(
metadata?.albumTitle || ""
);
// Try matching both full artist name and extracted primary artist
const artistMatch = jobArtist === normalizedArtist ||
jobArtist === normalizedPrimaryArtist ||
normalizedArtist.includes(jobArtist) ||
jobArtist.includes(normalizedArtist) ||
normalizedPrimaryArtist.includes(jobArtist) ||
jobArtist.includes(normalizedPrimaryArtist);
const albumMatch = jobAlbum === normalizedAlbum ||
normalizedAlbum.includes(jobAlbum) ||
jobAlbum.includes(normalizedAlbum);
const artistMatch =
jobArtist === normalizedArtist ||
jobArtist === normalizedPrimaryArtist ||
normalizedArtist.includes(jobArtist) ||
jobArtist.includes(normalizedArtist) ||
normalizedPrimaryArtist.includes(jobArtist) ||
jobArtist.includes(normalizedPrimaryArtist);
const albumMatch =
jobAlbum === normalizedAlbum ||
normalizedAlbum.includes(jobAlbum) ||
jobAlbum.includes(normalizedAlbum);
if (artistMatch && albumMatch) {
console.log(`[Scanner] PARTIAL MATCH: job ${job.id}`);
console.log(`[Scanner] Job: "${jobArtist}" - "${jobAlbum}"`);
logger.debug(`[Scanner] PARTIAL MATCH: job ${job.id}`);
logger.debug(
`[Scanner] Job: "${jobArtist}" - "${jobAlbum}"`
);
return true;
}
}
@@ -376,59 +349,79 @@ export class MusicScannerService {
// If the album title matches exactly, this track is likely a featured artist on a discovery album
for (const job of discoveryJobs) {
const metadata = job.metadata as any;
const jobAlbum = this.normalizeForMatching(metadata?.albumTitle || "");
const jobAlbum = this.normalizeForMatching(
metadata?.albumTitle || ""
);
if (jobAlbum === normalizedAlbum && normalizedAlbum.length > 3) {
console.log(`[Scanner] ALBUM-ONLY MATCH (featured artist): job ${job.id}`);
console.log(`[Scanner] Track artist "${normalizedArtist}" is likely featured on "${jobAlbum}"`);
if (
jobAlbum === normalizedAlbum &&
normalizedAlbum.length > 3
) {
logger.debug(
`[Scanner] ALBUM-ONLY MATCH (featured artist): job ${job.id}`
);
logger.debug(
`[Scanner] Track artist "${normalizedArtist}" is likely featured on "${jobAlbum}"`
);
return true;
}
}
// Pass 4: Check DiscoveryAlbum table (for already processed albums) by album title
const discoveryAlbumByTitle = await prisma.discoveryAlbum.findFirst({
where: {
albumTitle: { equals: albumTitle, mode: "insensitive" },
status: { in: ["ACTIVE", "LIKED"] },
},
});
const discoveryAlbumByTitle = await prisma.discoveryAlbum.findFirst(
{
where: {
albumTitle: { equals: albumTitle, mode: "insensitive" },
status: { in: ["ACTIVE", "LIKED"] },
},
}
);
if (discoveryAlbumByTitle) {
console.log(`[Scanner] DiscoveryAlbum match (by title): ${discoveryAlbumByTitle.id}`);
logger.debug(
`[Scanner] DiscoveryAlbum match (by title): ${discoveryAlbumByTitle.id}`
);
return true;
}
// Pass 5: Check if artist name matches any discovery album
// This catches cases where Lidarr downloads a different album than requested
// e.g., requested "Broods - Broods" but got "Broods - Evergreen"
const discoveryAlbumByArtist = await prisma.discoveryAlbum.findFirst({
where: {
artistName: { equals: artistName, mode: "insensitive" },
status: { in: ["ACTIVE", "LIKED", "DELETED"] }, // Include DELETED to catch cleanup scenarios
},
});
const discoveryAlbumByArtist =
await prisma.discoveryAlbum.findFirst({
where: {
artistName: { equals: artistName, mode: "insensitive" },
status: { in: ["ACTIVE", "LIKED", "DELETED"] }, // Include DELETED to catch cleanup scenarios
},
});
if (discoveryAlbumByArtist) {
// Double-check: only match if this artist has NO library albums yet
// This prevents marking albums from artists that exist in both library and discovery
const existingLibraryAlbum = await prisma.album.findFirst({
where: {
artist: { name: { equals: artistName, mode: "insensitive" } },
artist: {
name: { equals: artistName, mode: "insensitive" },
},
location: "LIBRARY",
},
});
if (!existingLibraryAlbum) {
console.log(`[Scanner] DiscoveryAlbum match (by artist): ${discoveryAlbumByArtist.id}`);
console.log(`[Scanner] Artist "${artistName}" is a discovery-only artist`);
logger.debug(
`[Scanner] DiscoveryAlbum match (by artist): ${discoveryAlbumByArtist.id}`
);
logger.debug(
`[Scanner] Artist "${artistName}" is a discovery-only artist`
);
return true;
}
}
console.log(`[Scanner] No discovery match found`);
logger.debug(`[Scanner] No discovery match found`);
return false;
} catch (error) {
console.error(`[Scanner] Error checking discovery status:`, error);
logger.error(`[Scanner] Error checking discovery status:`, error);
return false;
}
}
@@ -489,17 +482,36 @@ export class MusicScannerService {
let rawArtistName =
metadata.common.albumartist ||
metadata.common.artist ||
"Unknown Artist";
"";
// Folder fallback: If metadata is empty, try to parse from folder structure
if (!rawArtistName || rawArtistName.trim() === "") {
const folderPath = path.dirname(relativePath);
const folderName = path.basename(folderPath);
const parsedArtist = parseArtistFromPath(folderName);
if (parsedArtist) {
logger.debug(
`[Scanner] No metadata artist found, using folder: "${folderName}" -> "${parsedArtist}"`
);
rawArtistName = parsedArtist;
} else {
rawArtistName = "Unknown Artist";
logger.warn(
`[Scanner] Unknown Artist assigned for: ${relativePath} (no metadata, folder parse failed: "${folderName}")`
);
}
}
const albumTitle = metadata.common.album || "Unknown Album";
const year = metadata.common.year || null;
// ALWAYS extract primary artist first - this handles both:
// - Featured artists: "Artist A feat. Artist B" -> "Artist A"
// - Featured artists: "Artist A feat. Artist B" -> "Artist A"
// - Collaborations: "Artist A & Artist B" -> "Artist A"
// Band names like "Of Mice & Men" are preserved because extractPrimaryArtist
// only splits on " feat.", " ft.", " featuring ", " & ", etc. (with spaces)
const extractedPrimaryArtist = this.extractPrimaryArtist(rawArtistName);
const extractedPrimaryArtist = extractPrimaryArtist(rawArtistName);
let artistName = extractedPrimaryArtist;
// Canonicalize Various Artists variations (VA, V.A., <Various Artists>, etc.)
@@ -511,7 +523,7 @@ export class MusicScannerService {
let artist = await prisma.artist.findFirst({
where: { normalizedName: normalizedPrimaryName },
});
// If no match with primary name and we actually extracted something,
// also try the full raw name (for bands like "Of Mice & Men")
if (!artist && extractedPrimaryArtist !== rawArtistName) {
@@ -531,11 +543,15 @@ export class MusicScannerService {
// If we found an artist, optionally update to better capitalization
if (artist && artist.name !== artistName) {
// Check if the new name has better capitalization (starts with uppercase)
const currentNameIsLowercase = artist.name[0] === artist.name[0].toLowerCase();
const newNameIsCapitalized = artistName[0] === artistName[0].toUpperCase();
const currentNameIsLowercase =
artist.name[0] === artist.name[0].toLowerCase();
const newNameIsCapitalized =
artistName[0] === artistName[0].toUpperCase();
if (currentNameIsLowercase && newNameIsCapitalized) {
console.log(`Updating artist name capitalization: "${artist.name}" -> "${artistName}"`);
logger.debug(
`Updating artist name capitalization: "${artist.name}" -> "${artistName}"`
);
artist = await prisma.artist.update({
where: { id: artist.id },
data: { name: artistName },
@@ -550,17 +566,27 @@ export class MusicScannerService {
where: {
normalizedName: {
// Get artists whose normalized names start with similar prefix
startsWith: normalizedArtistName.substring(0, Math.min(3, normalizedArtistName.length)),
startsWith: normalizedArtistName.substring(
0,
Math.min(3, normalizedArtistName.length)
),
},
},
select: { id: true, name: true, normalizedName: true, mbid: true },
select: {
id: true,
name: true,
normalizedName: true,
mbid: true,
},
});
// Check for fuzzy matches
for (const candidate of similarArtists) {
if (areArtistNamesSimilar(artistName, candidate.name, 95)) {
console.log(`Fuzzy match found: "${artistName}" -> "${candidate.name}"`);
artist = candidate;
logger.debug(
`Fuzzy match found: "${artistName}" -> "${candidate.name}"`
);
artist = candidate as any;
break;
}
}
@@ -579,13 +605,15 @@ export class MusicScannerService {
const tempArtist = await prisma.artist.findFirst({
where: {
normalizedName: normalizedArtistName,
mbid: { startsWith: 'temp-' },
mbid: { startsWith: "temp-" },
},
});
if (tempArtist) {
// Consolidate: update temp artist to real MBID
console.log(`[SCANNER] Consolidating temp artist "${tempArtist.name}" with real MBID: ${artistMbid}`);
logger.debug(
`[SCANNER] Consolidating temp artist "${tempArtist.name}" with real MBID: ${artistMbid}`
);
artist = await prisma.artist.update({
where: { id: tempArtist.id },
data: { mbid: artistMbid },
@@ -635,8 +663,11 @@ export class MusicScannerService {
// 2. Check if artist+album matches a discovery download job
// 3. Check if artist is a discovery-only artist (has DISCOVER albums but no LIBRARY albums)
const isDiscoveryByPath = this.isDiscoveryPath(relativePath);
const isDiscoveryByJob = await this.isDiscoveryDownload(artistName, albumTitle);
const isDiscoveryByJob = await this.isDiscoveryDownload(
artistName,
albumTitle
);
// Check if this artist is discovery-only (has no LIBRARY albums)
// If so, any new albums from them should also be DISCOVER
let isDiscoveryArtist = false;
@@ -645,18 +676,23 @@ export class MusicScannerService {
where: { artistId: artist.id },
select: { location: true },
});
// Artist is discovery-only if they have albums but NONE are LIBRARY
if (artistAlbums.length > 0) {
const hasLibraryAlbums = artistAlbums.some(a => a.location === "LIBRARY");
const hasLibraryAlbums = artistAlbums.some(
(a) => a.location === "LIBRARY"
);
isDiscoveryArtist = !hasLibraryAlbums;
if (isDiscoveryArtist) {
console.log(`[Scanner] Discovery-only artist detected: ${artistName}`);
logger.debug(
`[Scanner] Discovery-only artist detected: ${artistName}`
);
}
}
}
const isDiscoveryAlbum = isDiscoveryByPath || isDiscoveryByJob || isDiscoveryArtist;
const isDiscoveryAlbum =
isDiscoveryByPath || isDiscoveryByJob || isDiscoveryArtist;
album = await prisma.album.create({
data: {
@@ -709,10 +745,11 @@ export class MusicScannerService {
}
if (needsExtraction) {
const coverPath = await this.coverArtExtractor.extractCoverArt(
absolutePath,
album.id
);
const coverPath =
await this.coverArtExtractor.extractCoverArt(
absolutePath,
album.id
);
if (coverPath) {
await prisma.album.update({
where: { id: album.id },
@@ -721,10 +758,11 @@ export class MusicScannerService {
} else {
// No embedded art, try fetching from Deezer
try {
const deezerCover = await deezerService.getAlbumCover(
artistName,
albumTitle
);
const deezerCover =
await deezerService.getAlbumCover(
artistName,
albumTitle
);
if (deezerCover) {
await prisma.album.update({
where: { id: album.id },

View File

@@ -1,4 +1,5 @@
import axios, { AxiosInstance } from "axios";
import { logger } from "../utils/logger";
import { redisClient } from "../utils/redis";
import { rateLimiter } from "./rateLimiter";
@@ -27,7 +28,7 @@ class MusicBrainzService {
return JSON.parse(cached);
}
} catch (err) {
console.warn("Redis get error:", err);
logger.warn("Redis get error:", err);
}
// Use global rate limiter instead of local rate limiting
@@ -39,7 +40,7 @@ class MusicBrainzService {
const actualTtl = data === null ? 3600 : ttlSeconds;
await redisClient.setEx(cacheKey, actualTtl, JSON.stringify(data));
} catch (err) {
console.warn("Redis set error:", err);
logger.warn("Redis set error:", err);
}
return data;
@@ -343,10 +344,10 @@ class MusicBrainzService {
const allRecordings = response.data.recordings || [];
console.log(
logger.debug(
`[MusicBrainz] Query: "${trackTitle}" by "${artistName}"`
);
console.log(
logger.debug(
`[MusicBrainz] Found ${allRecordings.length} total recordings`
);
@@ -358,7 +359,7 @@ class MusicBrainzService {
.slice(0, 2)
.map((r: any) => r["release-group"]?.title || "?")
.join(", ");
console.log(
logger.debug(
` ${i + 1}. [${disambig}] → ${
albumNames || "(no albums)"
}`
@@ -378,7 +379,7 @@ class MusicBrainzService {
return true;
});
console.log(
logger.debug(
`[MusicBrainz] After filtering live/demo: ${recordings.length} studio recordings`
);
@@ -425,20 +426,28 @@ class MusicBrainzService {
const strippedArtist = this.stripPunctuation(artistName);
if (strippedTitle !== normalizedTitle) {
console.log(`[MusicBrainz] Trying punctuation-stripped search: "${strippedTitle}" by ${strippedArtist}`);
logger.debug(
`[MusicBrainz] Trying punctuation-stripped search: "${strippedTitle}" by ${strippedArtist}`
);
const strippedQuery = `${strippedTitle} AND artist:${strippedArtist}`;
const strippedResponse = await this.client.get("/recording", {
params: {
query: strippedQuery,
limit: 10,
fmt: "json",
inc: "releases+release-groups+artists",
},
});
const strippedResponse = await this.client.get(
"/recording",
{
params: {
query: strippedQuery,
limit: 10,
fmt: "json",
inc: "releases+release-groups+artists",
},
}
);
const strippedRecordings = strippedResponse.data.recordings || [];
console.log(`[MusicBrainz] Punctuation-stripped search found ${strippedRecordings.length} recordings`);
const strippedRecordings =
strippedResponse.data.recordings || [];
logger.debug(
`[MusicBrainz] Punctuation-stripped search found ${strippedRecordings.length} recordings`
);
for (const rec of strippedRecordings) {
const recArtist =
@@ -448,11 +457,18 @@ class MusicBrainzService {
if (
recArtist
.toLowerCase()
.includes(strippedArtist.toLowerCase().split(" ")[0])
.includes(
strippedArtist
.toLowerCase()
.split(" ")[0]
)
) {
const result = this.extractAlbumFromRecording(rec);
const result =
this.extractAlbumFromRecording(rec);
if (result) {
console.log(`[MusicBrainz] ✓ Found via punctuation-stripped search: ${result.albumName}`);
logger.debug(
`[MusicBrainz] Found via punctuation-stripped search: ${result.albumName}`
);
return result;
}
}
@@ -464,34 +480,45 @@ class MusicBrainzService {
// Try each recording until we find one with a good (non-bootleg) album
for (const rec of recordings) {
const disambig = rec.disambiguation || "(no disambiguation)";
console.log(`[MusicBrainz] Trying recording: "${rec.title}" [${disambig}]`);
const disambig =
rec.disambiguation || "(no disambiguation)";
logger.debug(
`[MusicBrainz] Trying recording: "${rec.title}" [${disambig}]`
);
const result = this.extractAlbumFromRecording(rec, false);
if (result) {
console.log(`[MusicBrainz] ✓ Found album: "${result.albumName}" (MBID: ${result.albumMbid})`);
logger.debug(
`[MusicBrainz] Found album: "${result.albumName}" (MBID: ${result.albumMbid})`
);
return result; // Found a good album
} else {
console.log(`[MusicBrainz] ✗ No valid album found for this recording`);
logger.debug(
`[MusicBrainz] No valid album found for this recording`
);
}
}
// Fallback: Try again accepting Singles/EPs as last resort
console.log(`[MusicBrainz] No official albums found, trying to find Singles/EPs...`);
logger.debug(
`[MusicBrainz] No official albums found, trying to find Singles/EPs...`
);
for (const rec of recordings) {
const result = this.extractAlbumFromRecording(rec, true);
if (result) {
console.log(`[MusicBrainz] ✓ Found Single/EP: "${result.albumName}" (MBID: ${result.albumMbid})`);
logger.debug(
`[MusicBrainz] Found Single/EP: "${result.albumName}" (MBID: ${result.albumMbid})`
);
return result;
}
}
// No good albums found in any recording
console.log(
logger.debug(
`[MusicBrainz] No official albums or singles found for "${trackTitle}" by ${artistName} (checked ${recordings.length} recordings)`
);
return null;
} catch (error: any) {
console.error(
logger.error(
"MusicBrainz recording search error:",
error.message
);
@@ -505,7 +532,10 @@ class MusicBrainzService {
* Prioritizes studio albums and filters out compilations, live albums, and bootlegs
* @param allowSingles - If true, accepts Singles/EPs as a fallback (lower threshold)
*/
private extractAlbumFromRecording(recording: any, allowSingles: boolean = false): {
private extractAlbumFromRecording(
recording: any,
allowSingles: boolean = false
): {
albumName: string;
albumMbid: string;
artistMbid: string;
@@ -582,10 +612,12 @@ class MusicBrainzService {
r.release["release-group"]?.title || r.release.title;
return `"${title}" (${r.score})`;
});
console.log(
logger.debug(
`[MusicBrainz] Skipping recording - no ${modeText} found in ${
releases.length
} releases (threshold: ${threshold}). Top scores: ${topScores.join(", ")}`
} releases (threshold: ${threshold}). Top scores: ${topScores.join(
", "
)}`
);
return null;
}
@@ -597,7 +629,7 @@ class MusicBrainzService {
return null;
}
console.log(
logger.debug(
`[MusicBrainz] Selected "${releaseGroup.title}" (score: ${bestResult.score}) from ${releases.length} releases`
);
@@ -614,14 +646,19 @@ class MusicBrainzService {
* Clear cached recording search result
* Useful for retrying failed lookups
*/
async clearRecordingCache(trackTitle: string, artistName: string): Promise<boolean> {
async clearRecordingCache(
trackTitle: string,
artistName: string
): Promise<boolean> {
const cacheKey = `mb:search:recording:${artistName}:${trackTitle}`;
try {
await redisClient.del(cacheKey);
console.log(`[MusicBrainz] Cleared cache for: "${trackTitle}" by ${artistName}`);
logger.debug(
`[MusicBrainz] Cleared cache for: "${trackTitle}" by ${artistName}`
);
return true;
} catch (err) {
console.warn("Redis del error:", err);
logger.warn("Redis del error:", err);
return false;
}
}
@@ -644,13 +681,91 @@ class MusicBrainzService {
}
}
console.log(`[MusicBrainz] Cleared ${cleared} stale null cache entries`);
logger.debug(
`[MusicBrainz] Cleared ${cleared} stale null cache entries`
);
return cleared;
} catch (err) {
console.error("Error clearing stale caches:", err);
logger.error("Error clearing stale caches:", err);
return 0;
}
}
/**
 * Fetch the track listing for an album identified by its MusicBrainz
 * release-group MBID. Resolves the group to a concrete release
 * (preferring status "Official"), then reads that release's media and
 * track data. Results flow through the shared request cache.
 *
 * @param rgMbid - MusicBrainz release-group MBID
 * @returns Track titles with optional position/duration; [] when the
 *          group has no releases or any request fails.
 */
async getAlbumTracks(
  rgMbid: string
): Promise<Array<{ title: string; position?: number; duration?: number }>> {
  const cacheKey = `mb:albumtracks:${rgMbid}`;
  return this.cachedRequest(cacheKey, async () => {
    try {
      // Step 1: list the releases belonging to this release group.
      const groupResponse = await this.client.get(
        `/release-group/${rgMbid}`,
        {
          params: {
            inc: "releases",
            fmt: "json",
          },
        }
      );
      const releaseList = groupResponse.data?.releases || [];
      if (releaseList.length === 0) {
        logger.debug(
          `[MusicBrainz] No releases found for release group ${rgMbid}`
        );
        return [];
      }

      // Prefer an "Official" release; otherwise fall back to the first.
      const chosenRelease =
        releaseList.find((r: any) => r.status === "Official") ||
        releaseList[0];

      // Step 2: pull the chosen release with its recordings included.
      const releaseDetail = await this.client.get(
        `/release/${chosenRelease.id}`,
        {
          params: {
            inc: "recordings",
            fmt: "json",
          },
        }
      );

      // Flatten every medium (disc) into one ordered track list.
      const media = releaseDetail.data?.media || [];
      const trackList: Array<{
        title: string;
        position?: number;
        duration?: number;
      }> = [];
      for (const medium of media) {
        for (const entry of medium.tracks || []) {
          trackList.push({
            title: entry.title || entry.recording?.title,
            position: entry.position,
            duration: entry.length || entry.recording?.length,
          });
        }
      }

      logger.debug(
        `[MusicBrainz] Found ${trackList.length} tracks for release group ${rgMbid}`
      );
      return trackList;
    } catch (error: any) {
      logger.error(
        `MusicBrainz getAlbumTracks error: ${error.message}`
      );
      return [];
    }
  });
}
}
export const musicBrainzService = new MusicBrainzService();

View File

@@ -0,0 +1,421 @@
/**
* Notification Policy Service
*
* Intelligent notification filtering for download jobs.
* Suppresses intermediate failures during active retry cycles,
* only sending notifications for terminal states (completed/exhausted).
*
* State Machine: PENDING → PROCESSING → COMPLETED/EXHAUSTED
*
* Policy:
* - SUPPRESS: All failures during active retry window
* - SEND: Final success, permanent failure after retries exhausted
*/
import { logger } from "../utils/logger";
import { prisma } from "../utils/db";
// Verdict returned by every evaluate* method in this service.
interface NotificationDecision {
  shouldNotify: boolean;
  // Human-readable explanation of the decision (logged/inspected).
  reason: string;
  // Set only when shouldNotify is true.
  notificationType?: "download_complete" | "download_failed";
}

// Configuration constants
// How long (minutes) a processing job may keep retrying before failures
// stop being suppressed; overridable per job via metadata.retryWindowMinutes.
const DEFAULT_RETRY_WINDOW_MINUTES = 30;
// When true, failures classified as "transient" never notify
// (the job may still succeed on a later retry).
const SUPPRESS_TRANSIENT_FAILURES = true;

// Failure classification patterns
// NOTE(review): presumably matched as case-insensitive substrings of the
// job's error text by classifyFailure (not visible in this chunk) — confirm.
// Transient: likely to resolve on its own via retry or source fallback.
const TRANSIENT_PATTERNS = [
  "no sources found",
  "no indexer results",
  "no releases available",
  "import failed",
  "connection timeout",
  "rate limited",
  "temporarily unavailable",
  "searching for alternative",
  "download stuck",
];
// Permanent: retries exhausted or the request can never succeed.
const PERMANENT_PATTERNS = [
  "all releases exhausted",
  "all albums exhausted",
  "artist not found",
  "download cancelled",
  "album not found in lidarr",
];
// Critical: requires user/admin intervention; always notified
// (see evaluateFailedJob).
const CRITICAL_PATTERNS = [
  "disk full",
  "permission denied",
  "lidarr unavailable",
  "authentication failed",
  "invalid api key",
];

// Categories produced by failure classification.
type FailureClassification = "transient" | "permanent" | "critical";
class NotificationPolicyService {
/**
 * Decide whether a notification should be sent for a download job event.
 *
 * Fetches the job, applies global suppression rules (batch-notified
 * flows, already-notified jobs), then delegates to the status-specific
 * evaluator.
 *
 * @param jobId - The download job ID
 * @param eventType - The type of event (complete, failed, retry, timeout)
 * @returns Decision on whether to send notification
 */
async evaluateNotification(
  jobId: string,
  eventType: "complete" | "failed" | "retry" | "timeout"
): Promise<NotificationDecision> {
  logger.debug(
    `[NOTIFICATION-POLICY] Evaluating: ${jobId} (${eventType})`
  );

  // Fetch job with current state
  const job = await prisma.downloadJob.findUnique({
    where: { id: jobId },
  });
  if (!job) {
    return { shouldNotify: false, reason: "Job not found" };
  }

  const metadata = (job.metadata as any) || {};
  const downloadType = metadata.downloadType || "library";

  // Discovery and Spotify-import jobs are batch-notified elsewhere —
  // never emit a per-job notification for them.
  if (downloadType === "discovery" || metadata.spotifyImportJobId) {
    return {
      shouldNotify: false,
      reason: `${downloadType} download - batch notification only`,
    };
  }

  // Idempotence guard: at most one notification per job, ever.
  if (metadata.notificationSent === true) {
    return {
      shouldNotify: false,
      reason: "Notification already sent for this job",
    };
  }

  // Dispatch on current job status.
  if (job.status === "completed") {
    return this.evaluateCompletedJob(job, eventType);
  }
  if (job.status === "processing") {
    return this.evaluateProcessingJob(job, eventType);
  }
  if (job.status === "failed" || job.status === "exhausted") {
    return this.evaluateFailedJob(job, eventType);
  }
  if (job.status === "pending") {
    return { shouldNotify: false, reason: "Job not started yet" };
  }
  return {
    shouldNotify: false,
    reason: `Unknown status: ${job.status}`,
  };
}
/**
 * Decide on notifications for a job whose status is "completed".
 *
 * Only a "complete" event is meaningful here; anything else is
 * rejected. Also dedupes against sibling jobs for the same album so
 * the user receives at most one success notification per album.
 */
private async evaluateCompletedJob(
  job: any,
  eventType: string
): Promise<NotificationDecision> {
  if (eventType !== "complete") {
    return {
      shouldNotify: false,
      reason: "Invalid event type for completed job",
    };
  }

  // Dedupe: a sibling job for the same album may have notified already.
  if (await this.hasAlreadyNotified(job)) {
    return {
      shouldNotify: false,
      reason: "Another job for same album already sent notification",
    };
  }

  return {
    shouldNotify: true,
    reason: "Download completed successfully",
    notificationType: "download_complete",
  };
}
/**
* Evaluate notification for processing job
*/
private async evaluateProcessingJob(
job: any,
eventType: string
): Promise<NotificationDecision> {
// Processing jobs should never send notifications
// They're still in active retry window
if (eventType === "complete") {
return {
shouldNotify: false,
reason: "Job still processing - wait for status update to completed",
};
}
if (eventType === "failed" || eventType === "retry") {
// Check if in retry window
const inRetryWindow = await this.isInRetryWindow(job);
if (inRetryWindow) {
return {
shouldNotify: false,
reason: "Job in active retry window - suppressing notification",
};
}
// Retry window expired but still processing - extend it
return {
shouldNotify: false,
reason: "Retry window expired but job still processing - extending timeout",
};
}
if (eventType === "timeout") {
const inRetryWindow = await this.isInRetryWindow(job);
if (inRetryWindow) {
return {
shouldNotify: false,
reason: "Still in retry window - extending timeout",
};
}
// Timeout expired and out of retry window - let caller handle failure
return {
shouldNotify: false,
reason: "Timeout expired - caller should mark as failed",
};
}
return {
shouldNotify: false,
reason: "Processing job - no notification needed",
};
}
/**
* Evaluate notification for failed/exhausted job
*/
private async evaluateFailedJob(
job: any,
eventType: string
): Promise<NotificationDecision> {
if (eventType !== "failed" && eventType !== "timeout") {
return {
shouldNotify: false,
reason: "Invalid event type for failed job",
};
}
// Check if another job for same album already notified
const hasOtherNotification = await this.hasAlreadyNotified(job);
if (hasOtherNotification) {
return {
shouldNotify: false,
reason: "Another job for same album already sent notification",
};
}
// Classify the failure
const classification = this.classifyFailure(
job,
job.error || "Unknown error"
);
// Critical errors always notify
if (classification === "critical") {
return {
shouldNotify: true,
reason: "Critical error requires user intervention",
notificationType: "download_failed",
};
}
// Transient failures - suppress if configured
if (classification === "transient" && SUPPRESS_TRANSIENT_FAILURES) {
return {
shouldNotify: false,
reason: "Transient failure - suppressed (may succeed on retry)",
};
}
// Permanent failures or transient with suppress disabled
return {
shouldNotify: true,
reason:
classification === "permanent"
? "Permanent failure after retries exhausted"
: "Failure notification (transient suppression disabled)",
notificationType: "download_failed",
};
}
/**
* Check if job is in active retry window
* A job is in retry window if:
* 1. Status is 'processing'
* 2. Started within the last RETRY_WINDOW_MINUTES
*/
private async isInRetryWindow(job: any): Promise<boolean> {
if (job.status !== "processing") {
return false;
}
const metadata = (job.metadata as any) || {};
// Get retry window duration (configurable per job or use default)
const retryWindowMinutes =
metadata.retryWindowMinutes || DEFAULT_RETRY_WINDOW_MINUTES;
// Get start time
const startedAt = metadata.startedAt
? new Date(metadata.startedAt)
: job.createdAt;
// Calculate if window has expired
const windowMs = retryWindowMinutes * 60 * 1000;
const elapsed = Date.now() - startedAt.getTime();
if (elapsed > windowMs) {
logger.debug(
`[NOTIFICATION-POLICY] Retry window expired (${Math.round(
elapsed / 60000
)}m > ${retryWindowMinutes}m)`
);
return false;
}
logger.debug(
`[NOTIFICATION-POLICY] In retry window (${Math.round(
elapsed / 60000
)}m < ${retryWindowMinutes}m)`
);
return true;
}
/**
* Check if another job for the same artist+album has already sent a notification
* Prevents duplicate notifications when multiple jobs exist for same album
*/
private async hasAlreadyNotified(job: any): Promise<boolean> {
const metadata = (job.metadata as any) || {};
const artistName = metadata?.artistName?.toLowerCase().trim() || "";
const albumTitle = metadata?.albumTitle?.toLowerCase().trim() || "";
if (!artistName || !albumTitle) {
return false;
}
// Find other jobs for same album that have notified
const otherNotifiedJob = await prisma.downloadJob.findFirst({
where: {
id: { not: job.id },
userId: job.userId,
status: { in: ["completed", "failed", "exhausted"] },
},
});
if (otherNotifiedJob) {
const otherMeta = (otherNotifiedJob.metadata as any) || {};
const otherArtist =
otherMeta?.artistName?.toLowerCase().trim() || "";
const otherAlbum =
otherMeta?.albumTitle?.toLowerCase().trim() || "";
// Check if same album and notification was sent
if (
otherArtist === artistName &&
otherAlbum === albumTitle &&
otherMeta?.notificationSent === true
) {
logger.debug(
`[NOTIFICATION-POLICY] Found duplicate notification in job ${otherNotifiedJob.id}`
);
return true;
}
}
return false;
}
/**
* Classify failure type based on error message
* @returns 'transient' | 'permanent' | 'critical'
*/
private classifyFailure(job: any, error: string): FailureClassification {
const errorLower = error.toLowerCase();
// Check critical patterns first
for (const pattern of CRITICAL_PATTERNS) {
if (errorLower.includes(pattern)) {
logger.debug(
`[NOTIFICATION-POLICY] Classified as CRITICAL: ${pattern}`
);
return "critical";
}
}
// Check permanent patterns
for (const pattern of PERMANENT_PATTERNS) {
if (errorLower.includes(pattern)) {
logger.debug(
`[NOTIFICATION-POLICY] Classified as PERMANENT: ${pattern}`
);
return "permanent";
}
}
// Check transient patterns
for (const pattern of TRANSIENT_PATTERNS) {
if (errorLower.includes(pattern)) {
logger.debug(
`[NOTIFICATION-POLICY] Classified as TRANSIENT: ${pattern}`
);
return "transient";
}
}
// Default to transient if unknown
logger.debug(
`[NOTIFICATION-POLICY] Classified as TRANSIENT (default)`
);
return "transient";
}
/**
* Get configuration for notification policy
* Can be extended to pull from user settings or system config
*/
getConfig(): {
retryWindowMinutes: number;
suppressTransientFailures: boolean;
} {
return {
retryWindowMinutes: DEFAULT_RETRY_WINDOW_MINUTES,
suppressTransientFailures: SUPPRESS_TRANSIENT_FAILURES,
};
}
}
// Singleton instance — import this rather than constructing
// NotificationPolicyService directly.
export const notificationPolicyService = new NotificationPolicyService();

View File

@@ -1,4 +1,5 @@
import { PrismaClient } from "@prisma/client";
import { logger } from "../utils/logger";
const prisma = new PrismaClient();
@@ -35,7 +36,7 @@ class NotificationService {
},
});
console.log(
logger.debug(
`[NOTIFICATION] Created: ${type} - ${title} for user ${userId}`
);
return notification;
@@ -124,7 +125,7 @@ class NotificationService {
});
if (result.count > 0) {
console.log(
logger.debug(
`[NOTIFICATION] Cleaned up ${result.count} old notifications`
);
}

View File

@@ -1,4 +1,5 @@
import axios, { AxiosInstance } from "axios";
import { logger } from "../utils/logger";
import { config } from "../config";
interface PlaylistTrack {
@@ -131,14 +132,14 @@ Return ONLY valid JSON, no markdown formatting.`;
return result.tracks || [];
} catch (error: any) {
console.error(
logger.error(
"OpenAI API error:",
error.response?.data || error.message
);
// Log the raw response content for debugging
if (error instanceof SyntaxError) {
console.error("Failed to parse JSON response");
logger.error("Failed to parse JSON response");
}
throw new Error("Failed to generate playlist with AI");
@@ -175,7 +176,7 @@ Be concise and engaging (max 15 words).`;
return response.data.choices[0].message.content.trim();
} catch (error) {
console.error("OpenAI enhancement error:", error);
logger.error("OpenAI enhancement error:", error);
return "Recommended based on your listening history";
}
}

View File

@@ -1,4 +1,5 @@
import { prisma } from "../utils/db";
import { logger } from "../utils/logger";
import fs from "fs/promises";
import path from "path";
import { config } from "../config";
@@ -40,7 +41,7 @@ export class PodcastCacheService {
};
try {
console.log(" Starting podcast cover sync...");
logger.debug(" Starting podcast cover sync...");
// Ensure cover cache directory exists
await fs.mkdir(this.coverCacheDir, { recursive: true });
@@ -53,7 +54,7 @@ export class PodcastCacheService {
},
});
console.log(
logger.debug(
`[PODCAST] Found ${podcasts.length} podcasts needing cover sync`
);
@@ -72,7 +73,7 @@ export class PodcastCacheService {
data: { localCoverPath: localPath },
});
result.synced++;
console.log(` Synced cover for: ${podcast.title}`);
logger.debug(` Synced cover for: ${podcast.title}`);
} else {
result.skipped++;
}
@@ -81,18 +82,18 @@ export class PodcastCacheService {
result.failed++;
const errorMsg = `Failed to sync cover for ${podcast.title}: ${error.message}`;
result.errors.push(errorMsg);
console.error(` ${errorMsg}`);
logger.error(` ${errorMsg}`);
}
}
console.log("\nPodcast Cover Sync Summary:");
console.log(` Synced: ${result.synced}`);
console.log(` Failed: ${result.failed}`);
console.log(` Skipped: ${result.skipped}`);
logger.debug("\nPodcast Cover Sync Summary:");
logger.debug(` Synced: ${result.synced}`);
logger.debug(` Failed: ${result.failed}`);
logger.debug(` Skipped: ${result.skipped}`);
return result;
} catch (error: any) {
console.error(" Podcast cover sync failed:", error);
logger.error(" Podcast cover sync failed:", error);
throw error;
}
}
@@ -109,7 +110,7 @@ export class PodcastCacheService {
};
try {
console.log(" Starting podcast episode cover sync...");
logger.debug(" Starting podcast episode cover sync...");
await fs.mkdir(this.coverCacheDir, { recursive: true });
@@ -133,7 +134,7 @@ export class PodcastCacheService {
(ep) => ep.imageUrl !== ep.podcast.imageUrl
);
console.log(
logger.debug(
`[PODCAST] Found ${uniqueEpisodes.length} episodes with unique covers`
);
@@ -152,7 +153,7 @@ export class PodcastCacheService {
data: { localCoverPath: localPath },
});
result.synced++;
console.log(
logger.debug(
` Synced cover for episode: ${episode.title}`
);
} else {
@@ -163,18 +164,18 @@ export class PodcastCacheService {
result.failed++;
const errorMsg = `Failed to sync cover for episode ${episode.title}: ${error.message}`;
result.errors.push(errorMsg);
console.error(` ${errorMsg}`);
logger.error(` ${errorMsg}`);
}
}
console.log("\nEpisode Cover Sync Summary:");
console.log(` Synced: ${result.synced}`);
console.log(` Failed: ${result.failed}`);
console.log(` Skipped: ${result.skipped}`);
logger.debug("\nEpisode Cover Sync Summary:");
logger.debug(` Synced: ${result.synced}`);
logger.debug(` Failed: ${result.failed}`);
logger.debug(` Skipped: ${result.skipped}`);
return result;
} catch (error: any) {
console.error(" Episode cover sync failed:", error);
logger.error(" Episode cover sync failed:", error);
throw error;
}
}
@@ -204,7 +205,7 @@ export class PodcastCacheService {
return filePath;
} catch (error: any) {
console.error(
logger.error(
`Failed to download cover for ${type} ${id}:`,
error.message
);
@@ -240,7 +241,7 @@ export class PodcastCacheService {
if (!validCoverPaths.has(file)) {
await fs.unlink(path.join(this.coverCacheDir, file));
deleted++;
console.log(` [DELETE] Deleted orphaned podcast cover: ${file}`);
logger.debug(` [DELETE] Deleted orphaned podcast cover: ${file}`);
}
}

View File

@@ -1,4 +1,5 @@
import { prisma } from "../utils/db";
import { logger } from "../utils/logger";
import { config } from "../config";
import fs from "fs/promises";
import path from "path";
@@ -53,7 +54,7 @@ export function getDownloadProgress(episodeId: string): { progress: number; down
export async function getCachedFilePath(episodeId: string): Promise<string | null> {
// Don't return cache path if still downloading - file may be incomplete
if (downloadingEpisodes.has(episodeId)) {
console.log(`[PODCAST-DL] Episode ${episodeId} is still downloading, not using cache`);
logger.debug(`[PODCAST-DL] Episode ${episodeId} is still downloading, not using cache`);
return null;
}
@@ -78,7 +79,7 @@ export async function getCachedFilePath(episodeId: string): Promise<string | nul
const actual = stats.size;
const variance = Math.abs(actual - expected) / expected;
if (variance > 0.01) {
console.log(
logger.debug(
`[PODCAST-DL] Episode size mismatch vs episode.fileSize for ${episodeId}: actual ${actual} vs expected ${expected} (variance ${Math.round(
variance * 100
)}%), deleting cache`
@@ -101,7 +102,7 @@ export async function getCachedFilePath(episodeId: string): Promise<string | nul
// If no DB record, file might be incomplete or stale
if (!dbRecord) {
console.log(`[PODCAST-DL] No DB record for ${episodeId}, deleting stale cache file`);
logger.debug(`[PODCAST-DL] No DB record for ${episodeId}, deleting stale cache file`);
await fs.unlink(cachedPath).catch(() => {});
return null;
}
@@ -112,7 +113,7 @@ export async function getCachedFilePath(episodeId: string): Promise<string | nul
const variance = Math.abs(actualSize - expectedSize) / expectedSize;
if (expectedSize > 0 && variance > 0.01) {
console.log(`[PODCAST-DL] Size mismatch for ${episodeId}: actual ${actualSize} vs expected ${Math.round(expectedSize)}, deleting`);
logger.debug(`[PODCAST-DL] Size mismatch for ${episodeId}: actual ${actualSize} vs expected ${Math.round(expectedSize)}, deleting`);
await fs.unlink(cachedPath).catch(() => {});
await prisma.podcastDownload.deleteMany({ where: { episodeId } });
return null;
@@ -124,7 +125,7 @@ export async function getCachedFilePath(episodeId: string): Promise<string | nul
data: { lastAccessedAt: new Date() }
});
console.log(`[PODCAST-DL] Cache valid for ${episodeId}: ${stats.size} bytes`);
logger.debug(`[PODCAST-DL] Cache valid for ${episodeId}: ${stats.size} bytes`);
return cachedPath;
}
return null;
@@ -144,7 +145,7 @@ export function downloadInBackground(
): void {
// Skip if already downloading
if (downloadingEpisodes.has(episodeId)) {
console.log(`[PODCAST-DL] Already downloading episode ${episodeId}, skipping`);
logger.debug(`[PODCAST-DL] Already downloading episode ${episodeId}, skipping`);
return;
}
@@ -154,7 +155,7 @@ export function downloadInBackground(
// Start download in background (don't await)
performDownload(episodeId, audioUrl, userId)
.catch(err => {
console.error(`[PODCAST-DL] Background download failed for ${episodeId}:`, err.message);
logger.error(`[PODCAST-DL] Background download failed for ${episodeId}:`, err.message);
})
.finally(() => {
downloadingEpisodes.delete(episodeId);
@@ -171,7 +172,7 @@ async function performDownload(
attempt: number = 1
): Promise<void> {
const maxAttempts = 3;
console.log(`[PODCAST-DL] Starting background download for episode ${episodeId} (attempt ${attempt}/${maxAttempts})`);
logger.debug(`[PODCAST-DL] Starting background download for episode ${episodeId} (attempt ${attempt}/${maxAttempts})`);
const cacheDir = getPodcastCacheDir();
@@ -187,7 +188,7 @@ async function performDownload(
const existingCached = await getCachedFilePath(episodeId);
downloadingEpisodes.add(episodeId); // Re-add
if (existingCached) {
console.log(`[PODCAST-DL] Episode ${episodeId} already cached, skipping download`);
logger.debug(`[PODCAST-DL] Episode ${episodeId} already cached, skipping download`);
return;
}
@@ -247,7 +248,7 @@ async function performDownload(
} catch {}
}
console.log(
logger.debug(
`[PODCAST-DL] Downloading ${episodeId} (${expectedBytes > 0 ? Math.round(expectedBytes / 1024 / 1024) : 0}MB)`
);
@@ -271,7 +272,7 @@ async function performDownload(
const now = Date.now();
if (now - lastLogTime > 30000) {
const percent = contentLength > 0 ? Math.round((bytesDownloaded / contentLength) * 100) : 0;
console.log(`[PODCAST-DL] Download progress ${episodeId}: ${percent}% (${Math.round(bytesDownloaded / 1024 / 1024)}MB)`);
logger.debug(`[PODCAST-DL] Download progress ${episodeId}: ${percent}% (${Math.round(bytesDownloaded / 1024 / 1024)}MB)`);
lastLogTime = now;
}
});
@@ -312,7 +313,7 @@ async function performDownload(
const variance = Math.abs(stats.size - expectedBytes) / expectedBytes;
if (variance > 0.01) {
const percentComplete = Math.round((stats.size / expectedBytes) * 100);
console.error(`[PODCAST-DL] Incomplete download for ${episodeId}: ${stats.size}/${expectedBytes} bytes (${percentComplete}%)`);
logger.error(`[PODCAST-DL] Incomplete download for ${episodeId}: ${stats.size}/${expectedBytes} bytes (${percentComplete}%)`);
await fs.unlink(tempPath).catch(() => {});
throw new Error(`Download incomplete: got ${stats.size} bytes, expected ${expectedBytes}`);
}
@@ -344,7 +345,7 @@ async function performDownload(
}
});
console.log(`[PODCAST-DL] Successfully cached episode ${episodeId} (${fileSizeMb.toFixed(1)}MB)`);
logger.debug(`[PODCAST-DL] Successfully cached episode ${episodeId} (${fileSizeMb.toFixed(1)}MB)`);
// Clean up progress tracking
downloadProgress.delete(episodeId);
@@ -356,7 +357,7 @@ async function performDownload(
// Retry on failure
if (attempt < maxAttempts) {
console.log(`[PODCAST-DL] Download failed (attempt ${attempt}), retrying in 5s: ${error.message}`);
logger.debug(`[PODCAST-DL] Download failed (attempt ${attempt}), retrying in 5s: ${error.message}`);
await new Promise(resolve => setTimeout(resolve, 5000));
return performDownload(episodeId, audioUrl, userId, attempt + 1);
}
@@ -370,7 +371,7 @@ async function performDownload(
* Should be called periodically (e.g., daily)
*/
export async function cleanupExpiredCache(): Promise<{ deleted: number; freedMb: number }> {
console.log('[PODCAST-DL] Starting cache cleanup...');
logger.debug('[PODCAST-DL] Starting cache cleanup...');
const thirtyDaysAgo = new Date();
thirtyDaysAgo.setDate(thirtyDaysAgo.getDate() - 30);
@@ -398,13 +399,13 @@ export async function cleanupExpiredCache(): Promise<{ deleted: number; freedMb:
deleted++;
freedMb += download.fileSizeMb;
console.log(`[PODCAST-DL] Deleted expired cache: ${path.basename(download.localPath)}`);
logger.debug(`[PODCAST-DL] Deleted expired cache: ${path.basename(download.localPath)}`);
} catch (err: any) {
console.error(`[PODCAST-DL] Failed to delete ${download.localPath}:`, err.message);
logger.error(`[PODCAST-DL] Failed to delete ${download.localPath}:`, err.message);
}
}
console.log(`[PODCAST-DL] Cleanup complete: ${deleted} files deleted, ${freedMb.toFixed(1)}MB freed`);
logger.debug(`[PODCAST-DL] Cleanup complete: ${deleted} files deleted, ${freedMb.toFixed(1)}MB freed`);
return { deleted, freedMb };
}

View File

@@ -1,6 +1,12 @@
import { prisma } from "../utils/db";
import { logger } from "../utils/logger";
import { lastFmService } from "./lastfm";
import { moodBucketService } from "./moodBucketService";
import {
getDecadeWhereClause,
getEffectiveYear,
getDecadeFromYear,
} from "../utils/dateFilters";
export interface ProgrammaticMix {
id: string;
@@ -109,10 +115,14 @@ function getMixColor(type: string): string {
return MIX_COLORS[type] || MIX_COLORS["default"];
}
// Helper to randomly sample from array
// Helper to randomly sample from array using Fisher-Yates shuffle
function randomSample<T>(array: T[], count: number): T[] {
const shuffled = [...array].sort(() => Math.random() - 0.5);
return shuffled.slice(0, count);
const result = [...array];
for (let i = result.length - 1; i > 0; i--) {
const j = Math.floor(Math.random() * (i + 1));
[result[i], result[j]] = [result[j], result[i]];
}
return result.slice(0, count);
}
// Helper to get seeded random number for daily consistency
@@ -129,7 +139,14 @@ function getSeededRandom(seed: string): number {
// Type for track with album cover
type TrackWithAlbumCover = {
id: string;
album: { coverUrl: string | null; genres?: unknown };
album: {
coverUrl: string | null;
genres?: unknown;
userGenres?: string[] | null;
artist?: {
userGenres?: string[] | null;
};
};
lastfmTags?: string[];
essentiaGenres?: string[];
[key: string]: unknown;
@@ -154,30 +171,71 @@ async function findTracksByGenrePatterns(
{ essentiaGenres: { hasSome: tagPatterns } },
],
},
include: { album: { select: { coverUrl: true, genres: true } } },
include: {
album: {
select: {
coverUrl: true,
genres: true,
userGenres: true,
artist: {
select: {
userGenres: true,
},
},
},
},
},
take: limit,
});
if (tracks.length >= 15) {
return tracks;
return tracks as TrackWithAlbumCover[];
}
// Strategy 2: Query albums with non-empty genres and filter in memory
// Strategy 2: Query albums with non-empty genres (canonical or user) and filter in memory
const albumTracks = await prisma.track.findMany({
where: {
album: {
genres: { not: { equals: null } },
OR: [
{ genres: { not: { equals: null } } },
{ userGenres: { not: { equals: null } } },
],
},
},
include: {
album: {
select: {
coverUrl: true,
genres: true,
userGenres: true,
artist: {
select: {
userGenres: true,
},
},
},
},
},
include: { album: { select: { coverUrl: true, genres: true } } },
take: limit * 3, // Get more to filter down
});
// Filter by genre patterns (case-insensitive partial match)
// Merge canonical and user genres from both album and artist
const genreMatched = albumTracks.filter((t) => {
const albumGenres = t.album.genres as string[] | null;
if (!albumGenres || !Array.isArray(albumGenres)) return false;
return albumGenres.some((ag) =>
const albumUserGenres = (t.album.userGenres as string[] | null) || [];
const artistUserGenres = (t.album.artist?.userGenres as string[] | null) || [];
// Merge all genres
const allGenres = [
...(albumGenres || []),
...albumUserGenres,
...artistUserGenres,
];
if (allGenres.length === 0) return false;
return allGenres.some((ag) =>
genrePatterns.some((gp) =>
ag.toLowerCase().includes(gp.toLowerCase())
)
@@ -191,7 +249,7 @@ async function findTracksByGenrePatterns(
...genreMatched.filter((t) => !existingIds.has(t.id)),
];
return merged.slice(0, limit);
return merged.slice(0, limit) as TrackWithAlbumCover[];
}
export class ProgrammaticPlaylistService {
@@ -218,7 +276,7 @@ export class ProgrammaticPlaylistService {
: `${today}-${userId}`;
const dateSeed = getSeededRandom(seedString);
console.log(
logger.debug(
`[MIXES] Generating mixes for user ${userId}, forceRandom: ${forceRandom}, seed: ${dateSeed}`
);
@@ -444,7 +502,7 @@ export class ProgrammaticPlaylistService {
const selectedIndices: number[] = [];
let seed = dateSeed;
console.log(
logger.debug(
`[MIXES] Selecting ${this.DAILY_MIX_COUNT} mixes from ${mixGenerators.length} types...`
);
@@ -453,33 +511,33 @@ export class ProgrammaticPlaylistService {
const index = seed % mixGenerators.length;
if (!selectedIndices.includes(index)) {
selectedIndices.push(index);
console.log(
logger.debug(
`[MIXES] Selected index ${index}: ${mixGenerators[index].name}`
);
}
}
console.log(
logger.debug(
`[MIXES] Final selected indices: [${selectedIndices.join(", ")}]`
);
// Generate selected mixes
const mixPromises = selectedIndices.map((i) => {
console.log(`[MIXES] Generating ${mixGenerators[i].name}...`);
logger.debug(`[MIXES] Generating ${mixGenerators[i].name}...`);
return mixGenerators[i].fn();
});
const mixes = await Promise.all(mixPromises);
console.log(`[MIXES] Generated ${mixes.length} mixes before filtering`);
logger.debug(`[MIXES] Generated ${mixes.length} mixes before filtering`);
mixes.forEach((mix, i) => {
if (mix === null) {
console.log(
logger.debug(
`[MIXES] Mix ${i} (${
mixGenerators[selectedIndices[i]].name
}) returned NULL`
);
} else {
console.log(
logger.debug(
`[MIXES] Mix ${i}: ${mix.name} (${mix.trackCount} tracks)`
);
}
@@ -489,13 +547,13 @@ export class ProgrammaticPlaylistService {
let finalMixes = mixes.filter(
(mix): mix is ProgrammaticMix => mix !== null
);
console.log(
logger.debug(
`[MIXES] Returning ${finalMixes.length} mixes after filtering nulls`
);
// If we don't have 5 mixes, try to fill gaps with successful generators
if (finalMixes.length < this.DAILY_MIX_COUNT) {
console.log(
logger.debug(
`[MIXES] Only got ${finalMixes.length} mixes, trying to fill gaps...`
);
@@ -510,34 +568,34 @@ export class ProgrammaticPlaylistService {
i++
) {
if (!attemptedIndices.has(i)) {
console.log(
logger.debug(
`[MIXES] Attempting fallback: ${mixGenerators[i].name}`
);
const fallbackMix = await mixGenerators[i].fn();
if (fallbackMix && !successfulTypes.has(fallbackMix.type)) {
finalMixes.push(fallbackMix);
successfulTypes.add(fallbackMix.type);
console.log(
logger.debug(
`[MIXES] Fallback succeeded: ${fallbackMix.name}`
);
}
}
}
console.log(`[MIXES] After fallbacks: ${finalMixes.length} mixes`);
logger.debug(`[MIXES] After fallbacks: ${finalMixes.length} mixes`);
}
// Check if user has saved mood mix from the new bucket system (fast lookup)
try {
const savedMoodMix = await moodBucketService.getUserMoodMix(userId);
if (savedMoodMix) {
console.log(
logger.debug(
`[MIXES] User has saved mood mix: "${savedMoodMix.name}" with ${savedMoodMix.trackCount} tracks`
);
finalMixes.push(savedMoodMix);
}
} catch (err) {
console.error("[MIXES] Error getting user's saved mood mix:", err);
logger.error("[MIXES] Error getting user's saved mood mix:", err);
}
return finalMixes;
@@ -553,13 +611,14 @@ export class ProgrammaticPlaylistService {
// Get all decades
const albums = await prisma.album.findMany({
where: { tracks: { some: {} } },
select: { year: true },
select: { year: true, originalYear: true, displayYear: true },
});
const decades = new Set<number>();
albums.forEach((album) => {
if (album.year) {
const decade = Math.floor(album.year / 10) * 10;
const effectiveYear = getEffectiveYear(album);
if (effectiveYear) {
const decade = getDecadeFromYear(effectiveYear);
decades.add(decade);
}
});
@@ -574,9 +633,7 @@ export class ProgrammaticPlaylistService {
// Get ALL tracks from this decade
const tracks = await prisma.track.findMany({
where: {
album: {
year: { gte: selectedDecade, lt: selectedDecade + 10 },
},
album: getDecadeWhereClause(selectedDecade),
},
include: {
album: { select: { coverUrl: true } },
@@ -622,13 +679,13 @@ export class ProgrammaticPlaylistService {
take: 20,
});
console.log(`[GENRE MIX] Found ${genres.length} genres total`);
logger.debug(`[GENRE MIX] Found ${genres.length} genres total`);
const validGenres = genres.filter((g) => g._count.trackGenres >= 5);
console.log(
logger.debug(
`[GENRE MIX] ${validGenres.length} genres have >= 5 tracks`
);
if (validGenres.length === 0) {
console.log(`[GENRE MIX] FAILED: No genres with enough tracks`);
logger.debug(`[GENRE MIX] FAILED: No genres with enough tracks`);
return null;
}
@@ -684,11 +741,11 @@ export class ProgrammaticPlaylistService {
take: this.TRACK_LIMIT,
});
console.log(
logger.debug(
`[TOP TRACKS MIX] Found ${playStats.length} unique played tracks`
);
if (playStats.length < 5) {
console.log(
logger.debug(
`[TOP TRACKS MIX] FAILED: Only ${playStats.length} tracks (need at least 5)`
);
return null;
@@ -796,11 +853,11 @@ export class ProgrammaticPlaylistService {
},
});
console.log(
logger.debug(
`[ARTIST SIMILAR MIX] Found ${recentPlays.length} plays in last 7 days`
);
if (recentPlays.length === 0) {
console.log(`[ARTIST SIMILAR MIX] FAILED: No plays in last 7 days`);
logger.debug(`[ARTIST SIMILAR MIX] FAILED: No plays in last 7 days`);
return null;
}
@@ -824,13 +881,13 @@ export class ProgrammaticPlaylistService {
});
if (!topArtist || !topArtist.name) {
console.log(
logger.debug(
`[ARTIST SIMILAR MIX] FAILED: Top artist not found or has no name`
);
return null;
}
console.log(`[ARTIST SIMILAR MIX] Top artist: ${topArtist.name}`);
logger.debug(`[ARTIST SIMILAR MIX] Top artist: ${topArtist.name}`);
// Get similar artists from Last.fm
try {
@@ -839,7 +896,7 @@ export class ProgrammaticPlaylistService {
"10"
);
console.log(
logger.debug(
`[ARTIST SIMILAR MIX] Last.fm returned ${similarArtists.length} similar artists`
);
@@ -859,7 +916,7 @@ export class ProgrammaticPlaylistService {
},
});
console.log(
logger.debug(
`[ARTIST SIMILAR MIX] Found ${artistsInLibrary.length} similar artists in library`
);
@@ -867,12 +924,12 @@ export class ProgrammaticPlaylistService {
artist.albums.flatMap((album) => album.tracks)
);
console.log(
logger.debug(
`[ARTIST SIMILAR MIX] Total tracks from similar artists: ${tracks.length}`
);
if (tracks.length < 5) {
console.log(
logger.debug(
`[ARTIST SIMILAR MIX] FAILED: Only ${tracks.length} tracks (need at least 5)`
);
return null;
@@ -895,7 +952,7 @@ export class ProgrammaticPlaylistService {
color: getMixColor("artist-similar"),
};
} catch (error) {
console.error("Failed to generate artist similar mix:", error);
logger.error("Failed to generate artist similar mix:", error);
return null;
}
}
@@ -994,7 +1051,7 @@ export class ProgrammaticPlaylistService {
},
});
tracks = genres.flatMap((g) => g.trackGenres.map((tg) => tg.track));
console.log(
logger.debug(
`[PARTY MIX] Found ${tracks.length} tracks from Genre table`
);
@@ -1009,7 +1066,7 @@ export class ProgrammaticPlaylistService {
...tracks,
...albumGenreTracks.filter((t) => !existingIds.has(t.id)),
];
console.log(
logger.debug(
`[PARTY MIX] After album genre fallback: ${tracks.length} tracks`
);
}
@@ -1037,13 +1094,13 @@ export class ProgrammaticPlaylistService {
...tracks,
...audioTracks.filter((t) => !existingIds.has(t.id)),
];
console.log(
logger.debug(
`[PARTY MIX] After audio analysis fallback: ${tracks.length} tracks`
);
}
if (tracks.length < 15) {
console.log(
logger.debug(
`[PARTY MIX] FAILED: Only ${tracks.length} tracks found`
);
return null;
@@ -1099,11 +1156,11 @@ export class ProgrammaticPlaylistService {
take: 100,
});
console.log(`[CHILL MIX] Enhanced mode: Found ${tracks.length} tracks`);
logger.debug(`[CHILL MIX] Enhanced mode: Found ${tracks.length} tracks`);
// Strategy 2: Standard mode fallback
if (tracks.length < this.MIN_TRACKS_DAILY) {
console.log(`[CHILL MIX] Falling back to Standard mode`);
logger.debug(`[CHILL MIX] Falling back to Standard mode`);
tracks = await prisma.track.findMany({
where: {
analysisStatus: "completed",
@@ -1125,17 +1182,17 @@ export class ProgrammaticPlaylistService {
include: { album: { select: { coverUrl: true } } },
take: 100,
});
console.log(
logger.debug(
`[CHILL MIX] Standard mode: Found ${tracks.length} tracks`
);
}
console.log(
logger.debug(
`[CHILL MIX] Total: ${tracks.length} tracks matching criteria`
);
if (tracks.length < this.MIN_TRACKS_DAILY) {
console.log(
logger.debug(
`[CHILL MIX] FAILED: Only ${tracks.length} tracks (need ${this.MIN_TRACKS_DAILY})`
);
return null;
@@ -1222,13 +1279,13 @@ export class ProgrammaticPlaylistService {
take: 100,
});
tracks = enhancedTracks;
console.log(
logger.debug(
`[WORKOUT MIX] Enhanced mode: Found ${tracks.length} tracks`
);
// Strategy 2: Standard mode fallback - audio analysis
if (tracks.length < 15) {
console.log(`[WORKOUT MIX] Falling back to Standard mode`);
logger.debug(`[WORKOUT MIX] Falling back to Standard mode`);
const audioTracks = await prisma.track.findMany({
where: {
analysisStatus: "completed",
@@ -1259,7 +1316,7 @@ export class ProgrammaticPlaylistService {
...tracks,
...audioTracks.filter((t) => !existingIds.has(t.id)),
];
console.log(
logger.debug(
`[WORKOUT MIX] Standard mode: Total ${tracks.length} tracks`
);
}
@@ -1289,7 +1346,7 @@ export class ProgrammaticPlaylistService {
...tracks,
...genreTracks.filter((t) => !existingIds.has(t.id)),
];
console.log(
logger.debug(
`[WORKOUT MIX] After Genre table: ${tracks.length} tracks`
);
}
@@ -1305,13 +1362,13 @@ export class ProgrammaticPlaylistService {
...tracks,
...albumGenreTracks.filter((t) => !existingIds.has(t.id)),
];
console.log(
logger.debug(
`[WORKOUT MIX] After album genre fallback: ${tracks.length} tracks`
);
}
if (tracks.length < 15) {
console.log(
logger.debug(
`[WORKOUT MIX] FAILED: Only ${tracks.length} tracks found`
);
return null;
@@ -1383,7 +1440,7 @@ export class ProgrammaticPlaylistService {
},
});
tracks = genres.flatMap((g) => g.trackGenres.map((tg) => tg.track));
console.log(
logger.debug(
`[FOCUS MIX] Found ${tracks.length} tracks from Genre table`
);
@@ -1398,7 +1455,7 @@ export class ProgrammaticPlaylistService {
...tracks,
...albumGenreTracks.filter((t) => !existingIds.has(t.id)),
];
console.log(
logger.debug(
`[FOCUS MIX] After album genre fallback: ${tracks.length} tracks`
);
}
@@ -1419,13 +1476,13 @@ export class ProgrammaticPlaylistService {
...tracks,
...audioTracks.filter((t) => !existingIds.has(t.id)),
];
console.log(
logger.debug(
`[FOCUS MIX] After audio analysis fallback: ${tracks.length} tracks`
);
}
if (tracks.length < 15) {
console.log(
logger.debug(
`[FOCUS MIX] FAILED: Only ${tracks.length} tracks found`
);
return null;
@@ -1482,7 +1539,7 @@ export class ProgrammaticPlaylistService {
take: 100,
});
tracks = audioTracks;
console.log(
logger.debug(
`[HIGH ENERGY MIX] Found ${tracks.length} tracks from audio analysis`
);
@@ -1507,13 +1564,13 @@ export class ProgrammaticPlaylistService {
...tracks,
...albumGenreTracks.filter((t) => !existingIds.has(t.id)),
];
console.log(
logger.debug(
`[HIGH ENERGY MIX] After genre fallback: ${tracks.length} tracks`
);
}
if (tracks.length < 15) {
console.log(
logger.debug(
`[HIGH ENERGY MIX] FAILED: Only ${tracks.length} tracks found`
);
return null;
@@ -1573,13 +1630,13 @@ export class ProgrammaticPlaylistService {
take: 100,
});
console.log(
logger.debug(
`[LATE NIGHT MIX] Enhanced mode: Found ${tracks.length} tracks`
);
// Fallback to Standard mode if not enough Enhanced tracks
if (tracks.length < this.MIN_TRACKS_DAILY) {
console.log(`[LATE NIGHT MIX] Falling back to Standard mode`);
logger.debug(`[LATE NIGHT MIX] Falling back to Standard mode`);
tracks = await prisma.track.findMany({
where: {
analysisStatus: "completed",
@@ -1601,18 +1658,18 @@ export class ProgrammaticPlaylistService {
include: { album: { select: { coverUrl: true } } },
take: 100,
});
console.log(
logger.debug(
`[LATE NIGHT MIX] Standard mode: Found ${tracks.length} tracks`
);
}
console.log(
logger.debug(
`[LATE NIGHT MIX] Total: ${tracks.length} tracks matching criteria`
);
// No fallback padding - if not enough truly mellow tracks, don't generate
if (tracks.length < this.MIN_TRACKS_DAILY) {
console.log(
logger.debug(
`[LATE NIGHT MIX] FAILED: Only ${tracks.length} tracks (need ${this.MIN_TRACKS_DAILY})`
);
return null;
@@ -1672,7 +1729,7 @@ export class ProgrammaticPlaylistService {
take: 100,
});
tracks = enhancedTracks;
console.log(`[HAPPY MIX] Enhanced mode: Found ${tracks.length} tracks`);
logger.debug(`[HAPPY MIX] Enhanced mode: Found ${tracks.length} tracks`);
// Strategy 2: Standard mode fallback - valence/energy heuristics
if (tracks.length < 15) {
@@ -1690,7 +1747,7 @@ export class ProgrammaticPlaylistService {
...tracks,
...standardTracks.filter((t) => !existingIds.has(t.id)),
];
console.log(
logger.debug(
`[HAPPY MIX] After Standard fallback: ${tracks.length} tracks`
);
}
@@ -1715,13 +1772,13 @@ export class ProgrammaticPlaylistService {
...tracks,
...albumGenreTracks.filter((t) => !existingIds.has(t.id)),
];
console.log(
logger.debug(
`[HAPPY MIX] After genre fallback: ${tracks.length} tracks`
);
}
if (tracks.length < 15) {
console.log(
logger.debug(
`[HAPPY MIX] FAILED: Only ${tracks.length} tracks found`
);
return null;
@@ -1774,7 +1831,7 @@ export class ProgrammaticPlaylistService {
include: { album: { select: { coverUrl: true } } },
take: 150,
});
console.log(
logger.debug(
`[MELANCHOLY MIX] Enhanced mode: Found ${enhancedTracks.length} tracks`
);
@@ -1782,7 +1839,7 @@ export class ProgrammaticPlaylistService {
tracks = enhancedTracks;
} else {
// Strategy 2: Standard mode fallback
console.log(`[MELANCHOLY MIX] Falling back to Standard mode`);
logger.debug(`[MELANCHOLY MIX] Falling back to Standard mode`);
const audioTracks = await prisma.track.findMany({
where: {
analysisStatus: "completed",
@@ -1792,7 +1849,7 @@ export class ProgrammaticPlaylistService {
include: { album: { select: { coverUrl: true } } },
take: 150,
});
console.log(
logger.debug(
`[MELANCHOLY MIX] Standard mode: Found ${audioTracks.length} low-valence tracks`
);
@@ -1820,7 +1877,7 @@ export class ProgrammaticPlaylistService {
);
return hasMinorKey || hasSadTags || hasLastfmSadTags;
});
console.log(
logger.debug(
`[MELANCHOLY MIX] After tag filter: ${tracks.length} tracks`
);
}
@@ -1844,14 +1901,14 @@ export class ProgrammaticPlaylistService {
...tracks,
...albumGenreTracks.filter((t) => !existingIds.has(t.id)),
];
console.log(
logger.debug(
`[MELANCHOLY MIX] After genre fallback: ${tracks.length} tracks`
);
}
// Require minimum 15 tracks for a meaningful playlist
if (tracks.length < 15) {
console.log(
logger.debug(
`[MELANCHOLY MIX] FAILED: Only ${tracks.length} tracks found`
);
return null;
@@ -1919,7 +1976,7 @@ export class ProgrammaticPlaylistService {
take: 100,
});
tracks = audioTracks;
console.log(
logger.debug(
`[DANCE FLOOR MIX] Found ${tracks.length} tracks from audio analysis`
);
@@ -1943,13 +2000,13 @@ export class ProgrammaticPlaylistService {
...tracks,
...albumGenreTracks.filter((t) => !existingIds.has(t.id)),
];
console.log(
logger.debug(
`[DANCE FLOOR MIX] After genre fallback: ${tracks.length} tracks`
);
}
if (tracks.length < 15) {
console.log(
logger.debug(
`[DANCE FLOOR MIX] FAILED: Only ${tracks.length} tracks found`
);
return null;
@@ -2002,7 +2059,7 @@ export class ProgrammaticPlaylistService {
take: 100,
});
tracks = audioTracks;
console.log(
logger.debug(
`[ACOUSTIC MIX] Found ${tracks.length} tracks from audio analysis`
);
@@ -2024,13 +2081,13 @@ export class ProgrammaticPlaylistService {
...tracks,
...albumGenreTracks.filter((t) => !existingIds.has(t.id)),
];
console.log(
logger.debug(
`[ACOUSTIC MIX] After genre fallback: ${tracks.length} tracks`
);
}
if (tracks.length < 15) {
console.log(
logger.debug(
`[ACOUSTIC MIX] FAILED: Only ${tracks.length} tracks found`
);
return null;
@@ -2083,7 +2140,7 @@ export class ProgrammaticPlaylistService {
take: 100,
});
tracks = audioTracks;
console.log(
logger.debug(
`[INSTRUMENTAL MIX] Found ${tracks.length} tracks from audio analysis`
);
@@ -2106,13 +2163,13 @@ export class ProgrammaticPlaylistService {
...tracks,
...albumGenreTracks.filter((t) => !existingIds.has(t.id)),
];
console.log(
logger.debug(
`[INSTRUMENTAL MIX] After genre fallback: ${tracks.length} tracks`
);
}
if (tracks.length < 15) {
console.log(
logger.debug(
`[INSTRUMENTAL MIX] FAILED: Only ${tracks.length} tracks found`
);
return null;
@@ -2226,7 +2283,7 @@ export class ProgrammaticPlaylistService {
take: 100,
});
tracks = taggedTracks;
console.log(`[ROAD TRIP MIX] Found ${tracks.length} tracks from tags`);
logger.debug(`[ROAD TRIP MIX] Found ${tracks.length} tracks from tags`);
// Strategy 2: Audio analysis (medium-high energy, good tempo)
if (tracks.length < 15) {
@@ -2244,7 +2301,7 @@ export class ProgrammaticPlaylistService {
...tracks,
...audioTracks.filter((t) => !existingIds.has(t.id)),
];
console.log(
logger.debug(
`[ROAD TRIP MIX] After audio fallback: ${tracks.length} tracks`
);
}
@@ -2267,13 +2324,13 @@ export class ProgrammaticPlaylistService {
...tracks,
...albumGenreTracks.filter((t) => !existingIds.has(t.id)),
];
console.log(
logger.debug(
`[ROAD TRIP MIX] After genre fallback: ${tracks.length} tracks`
);
}
if (tracks.length < 15) {
console.log(
logger.debug(
`[ROAD TRIP MIX] FAILED: Only ${tracks.length} tracks found`
);
return null;
@@ -3582,7 +3639,7 @@ export class ProgrammaticPlaylistService {
useEnhancedMode = true;
} else {
// Not enough enhanced tracks - convert ML mood params to basic audio feature equivalents
console.log(
logger.debug(
`[MoodMixer] Only ${enhancedCount} enhanced tracks, falling back to basic features`
);

View File

@@ -6,6 +6,7 @@
*/
import PQueue from "p-queue";
import { logger } from "../utils/logger";
interface RateLimitConfig {
/** Requests per interval */
@@ -81,6 +82,7 @@ class GlobalRateLimiter {
private circuitBreakers: Map<ServiceName, CircuitState> = new Map();
private globalPaused = false;
private globalPauseUntil = 0;
private concurrencyMultiplier = 1; // 1-5 multiplier for user-configurable speed
constructor() {
// Initialize queues for each service
@@ -103,7 +105,7 @@ class GlobalRateLimiter {
});
}
console.log("Global rate limiter initialized");
logger.debug("Global rate limiter initialized");
}
/**
@@ -127,7 +129,7 @@ class GlobalRateLimiter {
// Check global pause
if (this.globalPaused && Date.now() < this.globalPauseUntil) {
const waitTime = this.globalPauseUntil - Date.now();
console.log(`Global rate limit pause - waiting ${waitTime}ms`);
logger.debug(`Global rate limit pause - waiting ${waitTime}ms`);
await this.sleep(waitTime);
}
@@ -138,7 +140,7 @@ class GlobalRateLimiter {
if (elapsed < circuit.resetAfterMs) {
// Circuit is open, wait or throw
const waitTime = circuit.resetAfterMs - elapsed;
console.log(
logger.debug(
`Circuit breaker open for ${service} - waiting ${waitTime}ms`
);
await this.sleep(waitTime);
@@ -183,7 +185,7 @@ class GlobalRateLimiter {
config.baseDelay,
error
);
console.warn(
logger.warn(
`Rate limited by ${service} (attempt ${attempt + 1}/${
maxRetries + 1
}) - backing off ${delay}ms`
@@ -197,7 +199,7 @@ class GlobalRateLimiter {
60000,
circuit.resetAfterMs * 2
);
console.warn(
logger.warn(
`Circuit breaker opened for ${service} - will reset in ${circuit.resetAfterMs}ms`
);
}
@@ -245,7 +247,7 @@ class GlobalRateLimiter {
pauseAll(durationMs: number) {
this.globalPaused = true;
this.globalPauseUntil = Date.now() + durationMs;
console.warn(`Global rate limiter paused for ${durationMs}ms`);
logger.warn(`Global rate limiter paused for ${durationMs}ms`);
}
/**
@@ -254,7 +256,7 @@ class GlobalRateLimiter {
resume() {
this.globalPaused = false;
this.globalPauseUntil = 0;
console.log("Global rate limiter resumed");
logger.debug("Global rate limiter resumed");
}
/**
@@ -290,6 +292,39 @@ class GlobalRateLimiter {
}
}
/**
* Update concurrency multiplier for parallel enrichment processing
* This allows power users to increase enrichment speed while respecting API rate limits
* @param multiplier 1-5, where 1 is conservative and 5 is maximum
*/
updateConcurrencyMultiplier(multiplier: number) {
const clampedMultiplier = Math.max(1, Math.min(5, multiplier));
this.concurrencyMultiplier = clampedMultiplier;
logger.debug(`[Rate Limiter] Updating concurrency multiplier to ${clampedMultiplier}`);
// Update all service queues with new concurrency
for (const [service, config] of Object.entries(SERVICE_CONFIGS)) {
const queue = this.queues.get(service as ServiceName);
if (queue) {
// Scale concurrency by multiplier, but never exceed intervalCap (rate limit)
const newConcurrency = Math.min(
config.concurrency * clampedMultiplier,
config.intervalCap
);
queue.concurrency = newConcurrency;
logger.debug(`${service}: ${config.concurrency}${newConcurrency}`);
}
}
}
/**
* Get current concurrency multiplier
*/
getConcurrencyMultiplier(): number {
return this.concurrencyMultiplier;
}
private sleep(ms: number): Promise<void> {
return new Promise((resolve) => setTimeout(resolve, ms));
}
@@ -300,4 +335,3 @@ export const rateLimiter = new GlobalRateLimiter();
// Export types for use in other services
export type { ServiceName, RateLimitConfig };

View File

@@ -1,4 +1,5 @@
import Parser from "rss-parser";
import { logger } from "../utils/logger";
interface RSSPodcast {
title: string;
@@ -36,18 +37,18 @@ class RSSParserService {
this.parser = new Parser({
customFields: {
feed: [
["itunes:author", "itunesAuthor"],
["itunes:image", "itunesImage"],
["itunes:explicit", "itunesExplicit"],
["itunes:type", "itunesType"],
["itunes:author", "itunesAuthor"] as any,
["itunes:image", "itunesImage"] as any,
["itunes:explicit", "itunesExplicit"] as any,
["itunes:type", "itunesType"] as any,
],
item: [
["itunes:author", "itunesAuthor"],
["itunes:duration", "itunesDuration"],
["itunes:image", "itunesImage"],
["itunes:episode", "itunesEpisode"],
["itunes:season", "itunesSeason"],
["itunes:explicit", "itunesExplicit"],
["itunes:author", "itunesAuthor"] as any,
["itunes:duration", "itunesDuration"] as any,
["itunes:image", "itunesImage"] as any,
["itunes:episode", "itunesEpisode"] as any,
["itunes:season", "itunesSeason"] as any,
["itunes:explicit", "itunesExplicit"] as any,
],
},
});
@@ -58,7 +59,7 @@ class RSSParserService {
*/
async parseFeed(feedUrl: string): Promise<ParsedPodcastFeed> {
try {
console.log(`\n [RSS PARSER] Fetching feed: ${feedUrl}`);
logger.debug(`\n [RSS PARSER] Fetching feed: ${feedUrl}`);
const feed = await this.parser.parseURL(feedUrl);
// Extract podcast metadata
@@ -72,9 +73,9 @@ class RSSParserService {
itunesId: this.extractItunesId(feed),
};
console.log(` Podcast: ${podcast.title}`);
console.log(` Author: ${podcast.author || "Unknown"}`);
console.log(` Episodes found: ${feed.items?.length || 0}`);
logger.debug(` Podcast: ${podcast.title}`);
logger.debug(` Author: ${podcast.author || "Unknown"}`);
logger.debug(` Episodes found: ${feed.items?.length || 0}`);
// Extract episodes
const episodes: RSSEpisode[] = (feed.items || [])
@@ -83,7 +84,7 @@ class RSSParserService {
// Find audio enclosure
const audioEnclosure = this.findAudioEnclosure(item);
if (!audioEnclosure) {
console.warn(
logger.warn(
` Skipping episode "${item.title}" - no audio found`
);
return null;
@@ -121,7 +122,7 @@ class RSSParserService {
return episode;
} catch (error: any) {
console.error(
logger.error(
` Error parsing episode "${item.title}":`,
error.message
);
@@ -130,11 +131,11 @@ class RSSParserService {
})
.filter((ep): ep is RSSEpisode => ep !== null);
console.log(` Successfully parsed ${episodes.length} episodes`);
logger.debug(` Successfully parsed ${episodes.length} episodes`);
return { podcast, episodes };
} catch (error: any) {
console.error(
logger.error(
`\n [RSS PARSER] Failed to parse feed:`,
error.message
);

View File

@@ -1,4 +1,5 @@
import { prisma } from "../utils/db";
import { logger } from "../utils/logger";
import { redisClient } from "../utils/redis";
interface SearchOptions {
@@ -43,6 +44,31 @@ interface PodcastSearchResult {
description: string | null;
imageUrl: string | null;
episodeCount: number;
rank?: number;
}
interface EpisodeSearchResult {
id: string;
title: string;
description: string | null;
podcastId: string;
podcastTitle: string;
publishedAt: Date;
duration: number;
audioUrl: string;
rank: number;
}
interface AudiobookSearchResult {
id: string;
title: string;
author: string | null;
narrator: string | null;
series: string | null;
description: string | null;
coverUrl: string | null;
duration: number | null;
rank: number;
}
export class SearchService {
@@ -54,6 +80,7 @@ export class SearchService {
private queryToTsquery(query: string): string {
return query
.trim()
.replace(/\s*&\s*/g, " and ")
.split(/\s+/)
.map((term) => `${term.replace(/[^\w]/g, "")}:*`)
.join(" & ");
@@ -77,9 +104,9 @@ export class SearchService {
name,
mbid,
"heroUrl",
ts_rank(search_vector, to_tsquery('english', ${tsquery})) AS rank
ts_rank("searchVector", to_tsquery('english', ${tsquery})) AS rank
FROM "Artist"
WHERE search_vector @@ to_tsquery('english', ${tsquery})
WHERE "searchVector" @@ to_tsquery('english', ${tsquery})
ORDER BY rank DESC, name ASC
LIMIT ${limit}
OFFSET ${offset}
@@ -87,7 +114,7 @@ export class SearchService {
return results;
} catch (error) {
console.error("Artist search error:", error);
logger.error("Artist search error:", error);
// Fallback to LIKE query if full-text search fails
const results = await prisma.artist.findMany({
where: {
@@ -134,13 +161,13 @@ export class SearchService {
a.year,
a."coverUrl",
GREATEST(
ts_rank(a.search_vector, to_tsquery('english', ${tsquery})),
ts_rank(ar.search_vector, to_tsquery('english', ${tsquery}))
ts_rank(a."searchVector", to_tsquery('english', ${tsquery})),
ts_rank(ar."searchVector", to_tsquery('english', ${tsquery}))
) AS rank
FROM "Album" a
LEFT JOIN "Artist" ar ON a."artistId" = ar.id
WHERE a.search_vector @@ to_tsquery('english', ${tsquery})
OR ar.search_vector @@ to_tsquery('english', ${tsquery})
WHERE a."searchVector" @@ to_tsquery('english', ${tsquery})
OR ar."searchVector" @@ to_tsquery('english', ${tsquery})
ORDER BY rank DESC, a.title ASC
LIMIT ${limit}
OFFSET ${offset}
@@ -148,7 +175,7 @@ export class SearchService {
return results;
} catch (error) {
console.error("Album search error:", error);
logger.error("Album search error:", error);
// Fallback to LIKE query - search both album title and artist name
const results = await prisma.album.findMany({
where: {
@@ -221,11 +248,11 @@ export class SearchService {
a.title as "albumTitle",
a."artistId",
ar.name as "artistName",
ts_rank(t.search_vector, to_tsquery('english', ${tsquery})) AS rank
ts_rank(t."searchVector", to_tsquery('english', ${tsquery})) AS rank
FROM "Track" t
LEFT JOIN "Album" a ON t."albumId" = a.id
LEFT JOIN "Artist" ar ON a."artistId" = ar.id
WHERE t.search_vector @@ to_tsquery('english', ${tsquery})
WHERE t."searchVector" @@ to_tsquery('english', ${tsquery})
ORDER BY rank DESC, t.title ASC
LIMIT ${limit}
OFFSET ${offset}
@@ -233,7 +260,7 @@ export class SearchService {
return results;
} catch (error) {
console.error("Track search error:", error);
logger.error("Track search error:", error);
// Fallback to LIKE query
const results = await prisma.track.findMany({
where: {
@@ -279,6 +306,238 @@ export class SearchService {
}
}
/**
* Search podcasts using PostgreSQL full-text search
*/
async searchPodcastsFTS({
query,
limit = 20,
offset = 0,
}: SearchOptions): Promise<PodcastSearchResult[]> {
if (!query || query.trim().length === 0) {
return [];
}
const tsquery = this.queryToTsquery(query);
try {
const results = await prisma.$queryRaw<PodcastSearchResult[]>`
SELECT
id,
title,
author,
description,
"imageUrl",
"episodeCount",
ts_rank("searchVector", to_tsquery('english', ${tsquery})) AS rank
FROM "Podcast"
WHERE "searchVector" @@ to_tsquery('english', ${tsquery})
ORDER BY rank DESC, title ASC
LIMIT ${limit}
OFFSET ${offset}
`;
return results;
} catch (error) {
logger.error("Podcast FTS search error:", error);
// Fallback to LIKE search
return this.searchPodcasts({ query, limit, offset });
}
}
/**
* Search podcast episodes using PostgreSQL full-text search
*/
async searchEpisodes({
query,
limit = 20,
offset = 0,
}: SearchOptions): Promise<EpisodeSearchResult[]> {
if (!query || query.trim().length === 0) {
return [];
}
const tsquery = this.queryToTsquery(query);
try {
const results = await prisma.$queryRaw<EpisodeSearchResult[]>`
SELECT
e.id,
e.title,
e.description,
e."podcastId",
e."publishedAt",
e.duration,
e."audioUrl",
p.title as "podcastTitle",
ts_rank(e."searchVector", to_tsquery('english', ${tsquery})) AS rank
FROM "PodcastEpisode" e
LEFT JOIN "Podcast" p ON e."podcastId" = p.id
WHERE e."searchVector" @@ to_tsquery('english', ${tsquery})
ORDER BY rank DESC, e."publishedAt" DESC
LIMIT ${limit}
OFFSET ${offset}
`;
return results;
} catch (error) {
logger.error("Episode search error:", error);
// Fallback to LIKE search
const results = await prisma.podcastEpisode.findMany({
where: {
OR: [
{
title: {
contains: query,
mode: "insensitive",
},
},
{
description: {
contains: query,
mode: "insensitive",
},
},
],
},
select: {
id: true,
title: true,
description: true,
podcastId: true,
publishedAt: true,
duration: true,
audioUrl: true,
podcast: {
select: {
title: true,
},
},
},
take: limit,
skip: offset,
orderBy: {
publishedAt: "desc",
},
});
return results.map((r) => ({
id: r.id,
title: r.title,
description: r.description,
podcastId: r.podcastId,
podcastTitle: r.podcast.title,
publishedAt: r.publishedAt,
duration: r.duration,
audioUrl: r.audioUrl,
rank: 0,
}));
}
}
/**
* Search audiobooks using PostgreSQL full-text search
* Falls back to external API if local cache is empty
*/
async searchAudiobooksFTS({
query,
limit = 20,
offset = 0,
}: SearchOptions): Promise<AudiobookSearchResult[]> {
if (!query || query.trim().length === 0) {
return [];
}
const tsquery = this.queryToTsquery(query);
try {
const results = await prisma.$queryRaw<AudiobookSearchResult[]>`
SELECT
id,
title,
author,
narrator,
series,
description,
"coverUrl",
duration,
ts_rank("searchVector", to_tsquery('english', ${tsquery})) AS rank
FROM "Audiobook"
WHERE "searchVector" @@ to_tsquery('english', ${tsquery})
ORDER BY rank DESC, title ASC
LIMIT ${limit}
OFFSET ${offset}
`;
// If we have results from cache, return them with transformed coverUrl
if (results.length > 0) {
return results.map((r) => ({
...r,
coverUrl: r.coverUrl ? `/audiobooks/${r.id}/cover` : null,
}));
}
// If cache is empty, fall back to LIKE search on cached audiobooks
const likeResults = await prisma.audiobook.findMany({
where: {
OR: [
{
title: {
contains: query,
mode: "insensitive",
},
},
{
author: {
contains: query,
mode: "insensitive",
},
},
{
narrator: {
contains: query,
mode: "insensitive",
},
},
{
series: {
contains: query,
mode: "insensitive",
},
},
],
},
select: {
id: true,
title: true,
author: true,
narrator: true,
series: true,
description: true,
coverUrl: true,
duration: true,
},
take: limit,
skip: offset,
orderBy: {
title: "asc",
},
});
return likeResults.map((r) => ({
...r,
coverUrl: r.coverUrl ? `/audiobooks/${r.id}/cover` : null,
rank: 0,
}));
} catch (error) {
logger.error("Audiobook FTS search error:", error);
return [];
}
}
/**
* Legacy LIKE-based podcast search (kept as fallback)
*/
async searchPodcasts({
query,
limit = 20,
@@ -288,7 +547,7 @@ export class SearchService {
return [];
}
// Simple LIKE search for podcasts (no full-text search vector on podcasts yet)
// Simple LIKE search for podcasts (fallback)
try {
const results = await prisma.podcast.findMany({
where: {
@@ -330,7 +589,7 @@ export class SearchService {
return results;
} catch (error) {
console.error("Podcast search error:", error);
logger.error("Podcast search error:", error);
return [];
}
}
@@ -342,6 +601,8 @@ export class SearchService {
albums: [],
tracks: [],
podcasts: [],
audiobooks: [],
episodes: [],
};
}
@@ -350,31 +611,53 @@ export class SearchService {
try {
const cached = await redisClient.get(cacheKey);
if (cached) {
console.log(`[SEARCH] Cache HIT for query: "${query}"`);
return JSON.parse(cached);
logger.debug(`[SEARCH] Cache HIT for query: "${query}"`);
const parsed = JSON.parse(cached);
// Transform cached audiobook coverUrls to ensure consistency
if (parsed.audiobooks && Array.isArray(parsed.audiobooks)) {
parsed.audiobooks = parsed.audiobooks.map(
(book: AudiobookSearchResult) => ({
...book,
coverUrl: book.coverUrl
? `/audiobooks/${book.id}/cover`
: null,
})
);
}
return parsed;
}
} catch (err) {
console.warn("[SEARCH] Redis cache read error:", err);
logger.warn("[SEARCH] Redis cache read error:", err);
}
console.log(
logger.debug(
`[SEARCH] Cache MISS for query: "${query}" - fetching from database`
);
const [artists, albums, tracks, podcasts] = await Promise.all([
this.searchArtists({ query, limit }),
this.searchAlbums({ query, limit }),
this.searchTracks({ query, limit }),
this.searchPodcasts({ query, limit }),
]);
const [artists, albums, tracks, podcasts, audiobooks, episodes] =
await Promise.all([
this.searchArtists({ query, limit }),
this.searchAlbums({ query, limit }),
this.searchTracks({ query, limit }),
this.searchPodcastsFTS({ query, limit }),
this.searchAudiobooksFTS({ query, limit }),
this.searchEpisodes({ query, limit }),
]);
const results = { artists, albums, tracks, podcasts };
const results = {
artists,
albums,
tracks,
podcasts,
audiobooks,
episodes,
};
// Cache for 1 hour (search results don't change often)
try {
await redisClient.setEx(cacheKey, 3600, JSON.stringify(results));
} catch (err) {
console.warn("[SEARCH] Redis cache write error:", err);
logger.warn("[SEARCH] Redis cache write error:", err);
}
return results;

File diff suppressed because it is too large Load Diff

View File

@@ -52,9 +52,21 @@ class SoulseekService {
private connecting = false;
private connectPromise: Promise<void> | null = null;
private lastConnectAttempt = 0;
private lastFailedAttempt = 0;
private readonly RECONNECT_COOLDOWN = 30000; // 30 seconds between reconnect attempts
private readonly DOWNLOAD_TIMEOUT = 180000; // 3 minutes per download attempt
private readonly MAX_DOWNLOAD_RETRIES = 3; // Try up to 3 different users
private readonly FAILED_RECONNECT_COOLDOWN = 5000; // 5 seconds after failed attempt
private readonly DOWNLOAD_TIMEOUT_INITIAL = 60000; // 1 minute for first attempt
private readonly DOWNLOAD_TIMEOUT_RETRY = 30000; // 30 seconds for retries
private readonly MAX_DOWNLOAD_RETRIES = 5; // Try up to 5 different users (more retries with shorter timeouts)
// Circuit breaker for failing users
private failedUsers = new Map<string, { failures: number; lastFailure: Date }>();
private readonly FAILURE_THRESHOLD = 3; // Block after 3 failures
private readonly FAILURE_WINDOW = 300000; // 5 minute window
// Concurrency tracking
private activeDownloads = 0;
private maxConcurrentDownloads = 0;
// Connection health tracking
private connectedAt: Date | null = null;
@@ -72,12 +84,12 @@ class SoulseekService {
private normalizeTrackTitle(title: string): string {
// First, normalize Unicode characters to ASCII equivalents for better search matching
let normalized = title
.replace(/…/g, "") // Remove ellipsis (U+2026) - files don't have this
.replace(/[''`]/g, "'") // Smart apostrophes → ASCII apostrophe
.replace(/[""]/g, '"') // Smart quotes → ASCII quotes
.replace(/\//g, " ") // Slash → space (file names can't have /)
.replace(/[–—]/g, "-") // En/em dash → hyphen
.replace(/[×]/g, "x"); // Multiplication sign → x
.replace(/…/g, "") // Remove ellipsis (U+2026) - files don't have this
.replace(/[''`]/g, "'") // Smart apostrophes → ASCII apostrophe
.replace(/[""]/g, '"') // Smart quotes → ASCII quotes
.replace(/\//g, " ") // Slash → space (file names can't have /)
.replace(/[–—]/g, "-") // En/em dash → hyphen
.replace(/[×]/g, "x"); // Multiplication sign → x
// Remove content in parentheses that contains live/remaster/remix info
const livePatterns =
@@ -178,21 +190,42 @@ class SoulseekService {
return this.connectPromise;
}
// Cooldown between reconnect attempts (skip if forced)
// Short cooldown after FAILED attempts (5s), longer after SUCCESS (30s)
const now = Date.now();
if (!force && now - this.lastConnectAttempt < this.RECONNECT_COOLDOWN) {
// If last successful connection was recent, respect cooldown
if (!force && this.lastConnectAttempt > 0 &&
now - this.lastConnectAttempt < this.RECONNECT_COOLDOWN) {
throw new Error(
"Connection cooldown - please wait before retrying"
);
}
// If last FAILED attempt was very recent (5s), wait briefly
if (!force && this.lastFailedAttempt > 0 &&
now - this.lastFailedAttempt < this.FAILED_RECONNECT_COOLDOWN) {
throw new Error(
"Connection recently failed - please wait before retrying"
);
}
this.connecting = true;
this.lastConnectAttempt = now;
this.connectPromise = this.connect().finally(() => {
this.connecting = false;
this.connectPromise = null;
});
this.connectPromise = this.connect()
.then(() => {
// Only set lastConnectAttempt on SUCCESS
this.lastConnectAttempt = Date.now();
this.lastFailedAttempt = 0; // Clear failed tracking
})
.catch((err) => {
// Track failed attempt separately (shorter cooldown)
this.lastFailedAttempt = Date.now();
throw err;
})
.finally(() => {
this.connecting = false;
this.connectPromise = null;
});
return this.connectPromise;
}
@@ -450,9 +483,75 @@ class SoulseekService {
});
}
/**
* Check if a user should be blocked due to recent failures
*/
private isUserBlocked(username: string): boolean {
const record = this.failedUsers.get(username);
if (!record) return false;
// Clear old failures outside the window
if (Date.now() - record.lastFailure.getTime() > this.FAILURE_WINDOW) {
this.failedUsers.delete(username);
return false;
}
return record.failures >= this.FAILURE_THRESHOLD;
}
/**
* Record a user failure for circuit breaker
*/
private recordUserFailure(username: string): void {
const record = this.failedUsers.get(username) || {
failures: 0,
lastFailure: new Date(),
};
record.failures++;
record.lastFailure = new Date();
this.failedUsers.set(username, record);
if (record.failures >= this.FAILURE_THRESHOLD) {
sessionLog(
"SOULSEEK",
`User ${username} blocked: ${record.failures} failures in ${Math.round(
this.FAILURE_WINDOW / 60000
)}min window`,
"WARN"
);
}
}
/**
* Categorize download errors for smarter retry behavior
*/
private categorizeError(error: Error): {
type: "user_offline" | "timeout" | "connection" | "file_not_found" | "unknown";
skipUser: boolean;
} {
const message = error.message.toLowerCase();
if (message.includes("user not exist") || message.includes("user offline")) {
return { type: "user_offline", skipUser: true };
}
if (message.includes("timed out") || message.includes("timeout")) {
return { type: "timeout", skipUser: true };
}
if (
message.includes("connection refused") ||
message.includes("connection reset")
) {
return { type: "connection", skipUser: true };
}
if (message.includes("file not found") || message.includes("no such file")) {
return { type: "file_not_found", skipUser: true };
}
return { type: "unknown", skipUser: false };
}
/**
* Rank all search results and return sorted matches (best first)
* Filters out matches below minimum score threshold
* Filters out matches below minimum score threshold and blocked users
*/
private rankAllResults(
results: SearchResult[],
@@ -462,9 +561,11 @@ class SoulseekService {
// Normalize search terms for matching
const normalizedArtist = artistName
.toLowerCase()
.replace(/\s*&\s*/g, " and ")
.replace(/[^a-z0-9\s]/g, "");
const normalizedTitle = trackTitle
.toLowerCase()
.replace(/\s*&\s*/g, " and ")
.replace(/[^a-z0-9\s]/g, "")
.replace(/^\d+\s*[-.]?\s*/, ""); // Remove leading track numbers
@@ -476,15 +577,24 @@ class SoulseekService {
.filter((w) => w.length > 2)
.slice(0, 3);
const scored = results.map((file) => {
// Filter out blocked users first
const availableResults = results.filter(
(file) => !this.isUserBlocked(file.user)
);
const scored = availableResults.map((file) => {
const filename = (file.file || "").toLowerCase();
const normalizedFilename = filename.replace(/[^a-z0-9]/g, "");
const shortFilename = filename.split(/[/\\]/).pop() || filename;
let score = 0;
// Prefer files with slots available (+20)
if (file.slots) score += 20;
// Strongly prefer files with slots available (+40)
if (file.slots) score += 40;
// Prefer high-speed peers
if (file.speed > 1000000) score += 15; // >1MB/s
else if (file.speed > 500000) score += 5; // >500KB/s
// Check if filename contains artist (full or first word)
if (
@@ -561,8 +671,25 @@ class SoulseekService {
*/
async downloadTrack(
match: TrackMatch,
destPath: string
destPath: string,
attemptNumber: number = 0
): Promise<{ success: boolean; error?: string }> {
// Track active downloads for concurrency monitoring
this.activeDownloads++;
this.maxConcurrentDownloads = Math.max(
this.maxConcurrentDownloads,
this.activeDownloads
);
sessionLog(
"SOULSEEK",
`Active downloads: ${this.activeDownloads}/${this.maxConcurrentDownloads} max`
);
// Use shorter timeout for retries
const timeout =
attemptNumber === 0
? this.DOWNLOAD_TIMEOUT_INITIAL
: this.DOWNLOAD_TIMEOUT_RETRY;
try {
await this.ensureConnected();
} catch (err: any) {
@@ -587,17 +714,20 @@ class SoulseekService {
return new Promise((resolve) => {
let resolved = false;
// Timeout handler - 3 minutes max per download attempt
// Timeout handler - progressive timeout based on attempt number
const timeoutId = setTimeout(() => {
if (!resolved) {
resolved = true;
this.activeDownloads--;
sessionLog(
"SOULSEEK",
`Download timed out after ${
this.DOWNLOAD_TIMEOUT / 1000
}s: ${match.filename}`,
`Download timed out after ${timeout / 1000}s: ${
match.filename
}`,
"WARN"
);
// Record user failure for circuit breaker
this.recordUserFailure(match.username);
// Clean up partial file if it exists
if (fs.existsSync(destPath)) {
try {
@@ -608,7 +738,7 @@ class SoulseekService {
}
resolve({ success: false, error: "Download timed out" });
}
}, this.DOWNLOAD_TIMEOUT);
}, timeout);
// Create a SearchResult object for the download
const downloadFile: SearchResult = {
@@ -629,13 +759,21 @@ class SoulseekService {
if (resolved) return; // Already timed out
resolved = true;
clearTimeout(timeoutId);
this.activeDownloads--;
if (err) {
const errorInfo = this.categorizeError(err);
sessionLog(
"SOULSEEK",
`Download failed: ${err.message}`,
`Download failed (${errorInfo.type}): ${err.message}`,
"ERROR"
);
// Record user failure if error indicates user issue
if (errorInfo.skipUser) {
this.recordUserFailure(match.username);
}
return resolve({ success: false, error: err.message });
}
@@ -952,7 +1090,7 @@ class SoulseekService {
sanitize(match.filename)
);
const result = await this.downloadTrack(match, destPath);
const result = await this.downloadTrack(match, destPath, attempt);
if (result.success) {
if (attempt > 0) {
sessionLog(

View File

@@ -1,4 +1,5 @@
import axios from "axios";
import { logger } from "../utils/logger";
/**
* Spotify Service
@@ -84,7 +85,7 @@ class SpotifyService {
for (const endpoint of endpoints) {
try {
console.log(`Spotify: Fetching anonymous token from ${endpoint.url}...`);
logger.debug(`Spotify: Fetching anonymous token from ${endpoint.url}...`);
const response = await axios.get(endpoint.url, {
params: endpoint.params,
@@ -104,15 +105,15 @@ class SpotifyService {
// Anonymous tokens last about an hour
this.tokenExpiry = Date.now() + 3600 * 1000;
console.log("Spotify: Got anonymous token");
logger.debug("Spotify: Got anonymous token");
return token;
}
} catch (error: any) {
console.log(`Spotify: Token endpoint failed (${error.response?.status || error.message})`);
logger.debug(`Spotify: Token endpoint failed (${error.response?.status || error.message})`);
}
}
console.error("Spotify: All token endpoints failed - API browsing unavailable");
logger.error("Spotify: All token endpoints failed - API browsing unavailable");
return null;
}
@@ -148,7 +149,7 @@ class SpotifyService {
}
try {
console.log(`Spotify: Fetching playlist ${playlistId}...`);
logger.debug(`Spotify: Fetching playlist ${playlistId}...`);
const playlistResponse = await axios.get(
`https://api.spotify.com/v1/playlists/${playlistId}`,
@@ -165,7 +166,7 @@ class SpotifyService {
);
const playlist = playlistResponse.data;
console.log(`Spotify: Fetched playlist "${playlist.name}" with ${playlist.tracks?.items?.length || 0} tracks`);
logger.debug(`Spotify: Fetched playlist "${playlist.name}" with ${playlist.tracks?.items?.length || 0} tracks`);
const tracks: SpotifyTrack[] = [];
@@ -180,7 +181,7 @@ class SpotifyService {
// Debug log for tracks with Unknown Album
if (albumName === "Unknown Album") {
console.log(`Spotify: Track "${track.name}" has no album data:`, JSON.stringify({
logger.debug(`Spotify: Track "${track.name}" has no album data:`, JSON.stringify({
trackId: track.id,
album: track.album,
hasAlbum: !!track.album,
@@ -203,7 +204,7 @@ class SpotifyService {
});
}
console.log(`Spotify: Processed ${tracks.length} tracks`);
logger.debug(`Spotify: Processed ${tracks.length} tracks`);
return {
id: playlist.id,
@@ -216,7 +217,7 @@ class SpotifyService {
isPublic: playlist.public ?? true,
};
} catch (error: any) {
console.error("Spotify API error:", error.response?.status, error.response?.data || error.message);
logger.error("Spotify API error:", error.response?.status, error.response?.data || error.message);
// Fallback to embed HTML parsing
return await this.fetchPlaylistViaEmbedHtml(playlistId);
@@ -228,7 +229,7 @@ class SpotifyService {
*/
private async fetchPlaylistViaEmbedHtml(playlistId: string): Promise<SpotifyPlaylist | null> {
try {
console.log("Spotify: Trying embed HTML parsing...");
logger.debug("Spotify: Trying embed HTML parsing...");
const response = await axios.get(
`https://open.spotify.com/embed/playlist/${playlistId}`,
@@ -244,7 +245,7 @@ class SpotifyService {
const match = html.match(/<script id="__NEXT_DATA__" type="application\/json">([^<]+)<\/script>/);
if (!match) {
console.error("Spotify: Could not find __NEXT_DATA__ in embed HTML");
logger.error("Spotify: Could not find __NEXT_DATA__ in embed HTML");
return null;
}
@@ -255,7 +256,7 @@ class SpotifyService {
|| data.props?.pageProps;
if (!playlistData) {
console.error("Spotify: Could not find playlist data in embed JSON");
logger.error("Spotify: Could not find playlist data in embed JSON");
return null;
}
@@ -278,7 +279,7 @@ class SpotifyService {
// Debug log for tracks with Unknown Album
if (embedAlbumName === "Unknown Album") {
console.log(`Spotify Embed: Track "${trackData.title || trackData.name}" has no album data:`, JSON.stringify({
logger.debug(`Spotify Embed: Track "${trackData.title || trackData.name}" has no album data:`, JSON.stringify({
album: trackData.album,
albumName: trackData.albumName,
hasAlbum: !!trackData.album,
@@ -311,7 +312,7 @@ class SpotifyService {
isPublic: true,
};
} catch (error: any) {
console.error("Spotify embed HTML error:", error.message);
logger.error("Spotify embed HTML error:", error.message);
return null;
}
}
@@ -330,7 +331,7 @@ class SpotifyService {
playlistId = parsed.id;
}
console.log("Spotify: Fetching public playlist via anonymous token");
logger.debug("Spotify: Fetching public playlist via anonymous token");
return await this.fetchPlaylistViaAnonymousApi(playlistId);
}
@@ -341,13 +342,13 @@ class SpotifyService {
async getFeaturedPlaylists(limit: number = 20): Promise<SpotifyPlaylistPreview[]> {
const token = await this.getAnonymousToken();
if (!token) {
console.error("Spotify: Cannot fetch featured playlists without token");
logger.error("Spotify: Cannot fetch featured playlists without token");
return [];
}
// Try official API first
try {
console.log("Spotify: Trying featured playlists via official API...");
logger.debug("Spotify: Trying featured playlists via official API...");
const response = await axios.get(
"https://api.spotify.com/v1/browse/featured-playlists",
@@ -366,7 +367,7 @@ class SpotifyService {
const playlists = response.data?.playlists?.items || [];
if (playlists.length > 0) {
console.log(`Spotify: Got ${playlists.length} featured playlists via official API`);
logger.debug(`Spotify: Got ${playlists.length} featured playlists via official API`);
return playlists.map((playlist: any) => ({
id: playlist.id,
name: playlist.name,
@@ -377,12 +378,12 @@ class SpotifyService {
}));
}
} catch (error: any) {
console.log("Spotify: Featured playlists API failed, trying search fallback...", error.response?.status || error.message);
logger.debug("Spotify: Featured playlists API failed, trying search fallback...", error.response?.status || error.message);
}
// Fallback: Search for popular playlists
try {
console.log("Spotify: Trying search fallback for featured playlists...");
logger.debug("Spotify: Trying search fallback for featured playlists...");
// Search for popular/curated playlists
const searches = ["Today's Top Hits", "Hot Hits", "Viral Hits", "All Out", "Rock Classics", "Chill Hits"];
@@ -400,10 +401,10 @@ class SpotifyService {
if (allPlaylists.length >= limit) break;
}
console.log(`Spotify: Got ${allPlaylists.length} playlists via search fallback`);
logger.debug(`Spotify: Got ${allPlaylists.length} playlists via search fallback`);
return allPlaylists.slice(0, limit);
} catch (searchError: any) {
console.error("Spotify: Search fallback also failed:", searchError.message);
logger.error("Spotify: Search fallback also failed:", searchError.message);
return [];
}
}
@@ -418,7 +419,7 @@ class SpotifyService {
}
try {
console.log(`Spotify: Fetching playlists for category ${categoryId}...`);
logger.debug(`Spotify: Fetching playlists for category ${categoryId}...`);
const response = await axios.get(
`https://api.spotify.com/v1/browse/categories/${categoryId}/playlists`,
@@ -445,7 +446,7 @@ class SpotifyService {
trackCount: playlist.tracks?.total || 0,
}));
} catch (error: any) {
console.error(`Spotify category playlists error for ${categoryId}:`, error.message);
logger.error(`Spotify category playlists error for ${categoryId}:`, error.message);
return [];
}
}
@@ -456,12 +457,12 @@ class SpotifyService {
async searchPlaylists(query: string, limit: number = 20): Promise<SpotifyPlaylistPreview[]> {
const token = await this.getAnonymousToken();
if (!token) {
console.error("Spotify: Cannot search without token");
logger.error("Spotify: Cannot search without token");
return [];
}
try {
console.log(`Spotify: Searching playlists for "${query}"...`);
logger.debug(`Spotify: Searching playlists for "${query}"...`);
const response = await axios.get(
"https://api.spotify.com/v1/search",
@@ -482,7 +483,7 @@ class SpotifyService {
);
const playlists = response.data?.playlists?.items || [];
console.log(`Spotify: Found ${playlists.length} playlists for "${query}"`);
logger.debug(`Spotify: Found ${playlists.length} playlists for "${query}"`);
return playlists
.filter((playlist: any) => playlist && playlist.id) // Filter out null entries
@@ -495,10 +496,10 @@ class SpotifyService {
trackCount: playlist.tracks?.total || 0,
}));
} catch (error: any) {
console.error("Spotify search playlists error:", error.response?.status, error.response?.data || error.message);
logger.error("Spotify search playlists error:", error.response?.status, error.response?.data || error.message);
// If unauthorized, try refreshing token and retry once
if (error.response?.status === 401) {
console.log("Spotify: Token expired, refreshing...");
logger.debug("Spotify: Token expired, refreshing...");
this.anonymousToken = null;
this.tokenExpiry = 0;
const newToken = await this.getAnonymousToken();
@@ -527,7 +528,7 @@ class SpotifyService {
trackCount: p.tracks?.total || 0,
}));
} catch (retryError) {
console.error("Spotify: Retry also failed");
logger.error("Spotify: Retry also failed");
}
}
}
@@ -566,7 +567,7 @@ class SpotifyService {
imageUrl: cat.icons?.[0]?.url || null,
}));
} catch (error: any) {
console.error("Spotify categories error:", error.message);
logger.error("Spotify categories error:", error.message);
return [];
}
}

Some files were not shown because too many files have changed in this diff Show More