Merge branch 'master' into inode-bug-fix

Jason Axley, 2025-12-03 11:43:54 -08:00, committed by GitHub
commit d6fed92b11
87 changed files with 4539 additions and 751 deletions


@@ -48,9 +48,14 @@ class BackupManager {
   }
 
   async init() {
-    const backupsDirExists = await fs.pathExists(this.backupPath)
-    if (!backupsDirExists) {
-      await fs.ensureDir(this.backupPath)
+    try {
+      const backupsDirExists = await fs.pathExists(this.backupPath)
+      if (!backupsDirExists) {
+        await fs.ensureDir(this.backupPath)
+      }
+    } catch (error) {
+      Logger.error(`[BackupManager] Failed to ensure backup directory at "${this.backupPath}": ${error.message}`)
+      throw new Error(`[BackupManager] Failed to ensure backup directory at "${this.backupPath}"`, { cause: error })
     }
 
     await this.loadBackups()
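
This and the similar wrappers below use the `cause` option of the Error constructor (Node.js 16.9+), which keeps the original filesystem error chained to the higher-level message instead of discarding it. A minimal sketch of what a caller would observe, assuming a hypothetical backupManager instance and an EACCES failure:

try {
  await backupManager.init()
} catch (error) {
  console.error(error.message) // [BackupManager] Failed to ensure backup directory at "..."
  console.error(error.cause?.message) // e.g. EACCES: permission denied, mkdir '...'
}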


@@ -24,10 +24,15 @@ class CacheManager {
     this.ImageCachePath = Path.join(this.CachePath, 'images')
     this.ItemCachePath = Path.join(this.CachePath, 'items')
 
-    await fs.ensureDir(this.CachePath)
-    await fs.ensureDir(this.CoverCachePath)
-    await fs.ensureDir(this.ImageCachePath)
-    await fs.ensureDir(this.ItemCachePath)
+    try {
+      await fs.ensureDir(this.CachePath)
+      await fs.ensureDir(this.CoverCachePath)
+      await fs.ensureDir(this.ImageCachePath)
+      await fs.ensureDir(this.ItemCachePath)
+    } catch (error) {
+      Logger.error(`[CacheManager] Failed to create cache directories at "${this.CachePath}": ${error.message}`)
+      throw new Error(`[CacheManager] Failed to create cache directories at "${this.CachePath}"`, { cause: error })
+    }
   }
 
   async handleCoverCache(res, libraryItemId, options = {}) {


@@ -0,0 +1,254 @@
const { setMaxListeners } = require('events')
const Logger = require('../Logger')
const BookFinder = require('../finders/BookFinder')
const PodcastFinder = require('../finders/PodcastFinder')

/**
 * Manager for handling streaming cover search across multiple providers
 */
class CoverSearchManager {
  constructor() {
    /** @type {Map<string, AbortController>} Map of requestId to AbortController */
    this.activeSearches = new Map()

    // Default timeout for each provider search
    this.providerTimeout = 10000 // 10 seconds

    // Set to 0 to disable the max listeners limit
    // We need one listener per provider (15+) and may have multiple concurrent searches
    this.maxListeners = 0
  }

  /**
   * Start a streaming cover search
   * @param {string} requestId - Unique identifier for this search request
   * @param {Object} searchParams - Search parameters
   * @param {string} searchParams.title - Title to search for
   * @param {string} searchParams.author - Author to search for (optional)
   * @param {string} searchParams.provider - Provider to search (or 'all')
   * @param {boolean} searchParams.podcast - Whether this is a podcast search
   * @param {Function} onResult - Callback for each result chunk
   * @param {Function} onComplete - Callback when search completes
   * @param {Function} onError - Callback for errors
   */
  async startSearch(requestId, searchParams, onResult, onComplete, onError) {
    if (this.activeSearches.has(requestId)) {
      Logger.warn(`[CoverSearchManager] Search with requestId ${requestId} already exists`)
      return
    }

    const abortController = new AbortController()
    // Increase max listeners on this signal to accommodate parallel provider searches
    // AbortSignal is an EventTarget, so we use the events module's setMaxListeners
    setMaxListeners(this.maxListeners, abortController.signal)
    this.activeSearches.set(requestId, abortController)

    Logger.info(`[CoverSearchManager] Starting search ${requestId} with params:`, searchParams)

    try {
      const { title, author, provider, podcast } = searchParams

      if (podcast) {
        await this.searchPodcastCovers(requestId, title, abortController.signal, onResult, onError)
      } else {
        await this.searchBookCovers(requestId, provider, title, author, abortController.signal, onResult, onError)
      }

      if (!abortController.signal.aborted) {
        onComplete()
      }
    } catch (error) {
      if (error.name === 'AbortError') {
        Logger.info(`[CoverSearchManager] Search ${requestId} was cancelled`)
      } else {
        Logger.error(`[CoverSearchManager] Search ${requestId} failed:`, error)
        onError(error.message)
      }
    } finally {
      this.activeSearches.delete(requestId)
    }
  }

  /**
   * Cancel an active search
   * @param {string} requestId - Request ID to cancel
   */
  cancelSearch(requestId) {
    const abortController = this.activeSearches.get(requestId)
    if (abortController) {
      Logger.info(`[CoverSearchManager] Cancelling search ${requestId}`)
      abortController.abort()
      this.activeSearches.delete(requestId)
      return true
    }
    return false
  }

  /**
   * Search for podcast covers
   */
  async searchPodcastCovers(requestId, title, signal, onResult, onError) {
    try {
      const results = await this.executeWithTimeout(() => PodcastFinder.findCovers(title), this.providerTimeout, signal)

      if (signal.aborted) return

      const covers = this.extractCoversFromResults(results)
      if (covers.length > 0) {
        onResult({
          provider: 'itunes',
          covers,
          total: covers.length
        })
      }
    } catch (error) {
      if (error.name !== 'AbortError') {
        Logger.error(`[CoverSearchManager] Podcast search failed:`, error)
        onError('itunes', error.message)
      }
    }
  }

  /**
   * Search for book covers across providers
   */
  async searchBookCovers(requestId, provider, title, author, signal, onResult, onError) {
    let providers = []
    if (provider === 'all') {
      providers = [...BookFinder.providers]
    } else if (provider === 'best') {
      // Best providers: google, fantlab, and audible.com
      providers = ['google', 'fantlab', 'audible']
    } else {
      providers = [provider]
    }

    Logger.debug(`[CoverSearchManager] Searching ${providers.length} providers in parallel`)

    // Search all providers in parallel
    const searchPromises = providers.map(async (providerName) => {
      if (signal.aborted) return

      try {
        const searchResults = await this.executeWithTimeout(() => BookFinder.search(null, providerName, title, author || ''), this.providerTimeout, signal)

        if (signal.aborted) return

        const covers = this.extractCoversFromResults(searchResults)
        Logger.debug(`[CoverSearchManager] Found ${covers.length} covers from ${providerName}`)
        if (covers.length > 0) {
          onResult({
            provider: providerName,
            covers,
            total: covers.length
          })
        }
      } catch (error) {
        if (error.name !== 'AbortError') {
          Logger.warn(`[CoverSearchManager] Provider ${providerName} failed:`, error.message)
          onError(providerName, error.message)
        }
      }
    })

    await Promise.allSettled(searchPromises)
  }

  /**
   * Execute a promise with timeout and abort signal
   */
  async executeWithTimeout(fn, timeout, signal) {
    return new Promise(async (resolve, reject) => {
      let abortHandler = null
      let timeoutId = null

      // Cleanup function to ensure we always remove listeners
      const cleanup = () => {
        if (timeoutId) {
          clearTimeout(timeoutId)
          timeoutId = null
        }
        if (abortHandler) {
          signal.removeEventListener('abort', abortHandler)
          abortHandler = null
        }
      }

      // Set up timeout
      timeoutId = setTimeout(() => {
        cleanup()
        const error = new Error('Provider timeout')
        error.name = 'TimeoutError'
        reject(error)
      }, timeout)

      // Check if already aborted
      if (signal.aborted) {
        cleanup()
        const error = new Error('Search cancelled')
        error.name = 'AbortError'
        reject(error)
        return
      }

      // Set up abort handler
      abortHandler = () => {
        cleanup()
        const error = new Error('Search cancelled')
        error.name = 'AbortError'
        reject(error)
      }
      signal.addEventListener('abort', abortHandler)

      try {
        const result = await fn()
        cleanup()
        resolve(result)
      } catch (error) {
        cleanup()
        reject(error)
      }
    })
  }

  /**
   * Extract cover URLs from search results
   */
  extractCoversFromResults(results) {
    const covers = []
    if (!Array.isArray(results)) return covers

    results.forEach((result) => {
      if (typeof result === 'string') {
        covers.push(result)
      }
      if (result.covers && Array.isArray(result.covers)) {
        covers.push(...result.covers)
      }
      if (result.cover) {
        covers.push(result.cover)
      }
    })

    // Remove duplicates
    return [...new Set(covers)]
  }

  /**
   * Cancel all active searches (cleanup on server shutdown)
   */
  cancelAllSearches() {
    Logger.info(`[CoverSearchManager] Cancelling ${this.activeSearches.size} active searches`)
    for (const [requestId, abortController] of this.activeSearches.entries()) {
      abortController.abort()
    }
    this.activeSearches.clear()
  }
}

module.exports = new CoverSearchManager()
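
A rough usage sketch for the new manager (the request id, require path, and console callbacks here are illustrative, not part of the commit). Each provider gets this.providerTimeout (10 seconds) before it is dropped, and note that onError receives only a message for top-level failures but a provider name plus message for per-provider failures:

const CoverSearchManager = require('./managers/CoverSearchManager')

// Results stream back as each provider responds
CoverSearchManager.startSearch(
  'req-123', // hypothetical unique request id
  { title: 'Some Title', author: 'Some Author', provider: 'all', podcast: false },
  (chunk) => console.log(`${chunk.provider}: ${chunk.total} covers`), // onResult, once per provider that found covers
  () => console.log('Search complete'), // onComplete
  (providerOrMsg, msg) => console.warn('Search error:', providerOrMsg, msg ?? '') // onError
)

// Later, e.g. if the client disconnects mid-search:
CoverSearchManager.cancelSearch('req-123')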


@@ -37,8 +37,13 @@ class LogManager {
   }
 
   async ensureLogDirs() {
-    await fs.ensureDir(this.DailyLogPath)
-    await fs.ensureDir(this.ScanLogPath)
+    try {
+      await fs.ensureDir(this.DailyLogPath)
+      await fs.ensureDir(this.ScanLogPath)
+    } catch (error) {
+      console.error(`[LogManager] Failed to create log directories at "${this.DailyLogPath}": ${error.message}`)
+      throw new Error(`[LogManager] Failed to create log directories at "${this.DailyLogPath}"`, { cause: error })
+    }
   }
 
   /**
@@ -169,7 +174,7 @@ class LogManager {
   /**
    * Most recent 5000 daily logs
    *
-   * @returns {LogObject[]}
+   * @returns {string}
    */
   getMostRecentCurrentDailyLogs() {
     return this.currentDailyLog?.logs.slice(-5000) || []


@@ -38,7 +38,12 @@ class MigrationManager {
     if (!(await fs.pathExists(this.configPath))) throw new Error(`Config path does not exist: ${this.configPath}`)
 
     this.migrationsDir = path.join(this.configPath, 'migrations')
-    await fs.ensureDir(this.migrationsDir)
+    try {
+      await fs.ensureDir(this.migrationsDir)
+    } catch (error) {
+      Logger.error(`[MigrationManager] Failed to create migrations directory at "${this.migrationsDir}": ${error.message}`)
+      throw new Error(`[MigrationManager] Failed to create migrations directory at "${this.migrationsDir}"`, { cause: error })
+    }
 
     this.serverVersion = this.extractVersionFromTag(serverVersion)
     if (!this.serverVersion) throw new Error(`Invalid server version: ${serverVersion}. Expected a version tag like v1.2.3.`)
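
The version check above expects tags shaped like v1.2.3. For illustration only, a plausible shape for such a helper (extractVersionFromTag exists in MigrationManager, but this body is an assumption, not the committed code):

// Hypothetical sketch: pull '1.2.3' out of 'v1.2.3' or 'v1.2.3-beta'; null if no match
function extractVersionFromTag(versionTag) {
  return versionTag?.match(/v?(\d+\.\d+\.\d+)/)?.[1] || null
}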


@@ -459,7 +459,12 @@ class PlaybackSessionManager {
    * Remove all stream folders in `/metadata/streams`
    */
   async removeOrphanStreams() {
-    await fs.ensureDir(this.StreamsPath)
+    try {
+      await fs.ensureDir(this.StreamsPath)
+    } catch (error) {
+      Logger.error(`[PlaybackSessionManager] Failed to create streams directory at "${this.StreamsPath}": ${error.message}`)
+      throw new Error(`[PlaybackSessionManager] Failed to create streams directory at "${this.StreamsPath}"`, { cause: error })
+    }
     try {
       const streamsInPath = await fs.readdir(this.StreamsPath)
       for (const streamId of streamsInPath) {


@@ -127,10 +127,20 @@ class PodcastManager {
     })
 
     let success = !!ffmpegDownloadResponse?.success
-    // If failed due to ffmpeg error, retry without tagging
+
+    if (success) {
+      // Attempt to ffprobe and add podcast episode audio file
+      success = await this.scanAddPodcastEpisodeAudioFile()
+      if (!success) {
+        Logger.error(`[PodcastManager] Failed to scan and add podcast episode audio file - removing file`)
+        await fs.remove(this.currentDownload.targetPath)
+      }
+    }
+    // If failed due to ffmpeg or ffprobe error, retry without tagging
     // e.g. RSS feed may have incorrect file extension and file type
     // See https://github.com/advplyr/audiobookshelf/issues/3837
-    if (!success && ffmpegDownloadResponse?.isFfmpegError) {
+    // e.g. ffmpeg may download the file without streams, causing the ffprobe to fail
+    if (!success && !ffmpegDownloadResponse?.isRequestError) {
       Logger.info(`[PodcastManager] Retrying episode download without tagging`)
       // Download episode only
       success = await downloadFile(this.currentDownload.url, this.currentDownload.targetPath)
@@ -139,23 +149,20 @@ class PodcastManager {
           Logger.error(`[PodcastManager] Podcast Episode download failed`, error)
           return false
         })
+
+      if (success) {
+        success = await this.scanAddPodcastEpisodeAudioFile()
+        if (!success) {
+          Logger.error(`[PodcastManager] Failed to scan and add podcast episode audio file - removing file`)
+          await fs.remove(this.currentDownload.targetPath)
+        }
+      }
     }
 
     if (success) {
-      success = await this.scanAddPodcastEpisodeAudioFile()
-      if (!success) {
-        await fs.remove(this.currentDownload.targetPath)
-        this.currentDownload.setFinished(false)
-        const taskFailedString = {
-          text: 'Failed',
-          key: 'MessageTaskFailed'
-        }
-        task.setFailed(taskFailedString)
-      } else {
-        Logger.info(`[PodcastManager] Successfully downloaded podcast episode "${this.currentDownload.episodeTitle}"`)
-        this.currentDownload.setFinished(true)
-        task.setFinished()
-      }
+      Logger.info(`[PodcastManager] Successfully downloaded podcast episode "${this.currentDownload.episodeTitle}"`)
+      this.currentDownload.setFinished(true)
+      task.setFinished()
     } else {
       const taskFailedString = {
         text: 'Failed',