Merge branch 'master' into inode-bug-fix

Jason Axley 2025-12-03 11:43:54 -08:00 committed by GitHub
commit d6fed92b11
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
87 changed files with 4539 additions and 751 deletions

View file

@ -407,8 +407,7 @@ class Server {
const nextApp = next({ dev: Logger.isDev, dir: ReactClientPath })
const handle = nextApp.getRequestHandler()
await nextApp.prepare()
router.get('*', (req, res) => handle(req, res))
router.post('/internal-api/*', (req, res) => handle(req, res))
router.all('*', (req, res) => handle(req, res))
}
const unixSocketPrefix = 'unix/'

View file

@ -2,6 +2,7 @@ const SocketIO = require('socket.io')
const Logger = require('./Logger')
const Database = require('./Database')
const TokenManager = require('./auth/TokenManager')
const CoverSearchManager = require('./managers/CoverSearchManager')
/**
* @typedef SocketClient
@ -180,6 +181,10 @@ class SocketAuthority {
// Scanning
socket.on('cancel_scan', (libraryId) => this.cancelScan(libraryId))
// Cover search streaming
socket.on('search_covers', (payload) => this.handleCoverSearch(socket, payload))
socket.on('cancel_cover_search', (requestId) => this.handleCancelCoverSearch(socket, requestId))
// Logs
socket.on('set_log_listener', (level) => Logger.addSocketListener(socket, level))
socket.on('remove_log_listener', () => Logger.removeSocketListener(socket.id))
@ -200,6 +205,10 @@ class SocketAuthority {
const disconnectTime = Date.now() - _client.connected_at
Logger.info(`[SocketAuthority] Socket ${socket.id} disconnected from client "${_client.user.username}" after ${disconnectTime}ms (Reason: ${reason})`)
// Cancel any active cover searches for this socket
this.cancelSocketCoverSearches(socket.id)
delete this.clients[socket.id]
}
})
@ -300,5 +309,100 @@ class SocketAuthority {
Logger.debug('[SocketAuthority] Cancel scan', id)
this.Server.cancelLibraryScan(id)
}
/**
* Handle cover search request via WebSocket
* @param {SocketIO.Socket} socket
* @param {Object} payload
*/
async handleCoverSearch(socket, payload) {
const client = this.clients[socket.id]
if (!client?.user) {
Logger.error('[SocketAuthority] Unauthorized cover search request')
socket.emit('cover_search_error', {
requestId: payload.requestId,
error: 'Unauthorized'
})
return
}
const { requestId, title, author, provider, podcast } = payload
if (!requestId || !title) {
Logger.error('[SocketAuthority] Invalid cover search request')
socket.emit('cover_search_error', {
requestId,
error: 'Invalid request parameters'
})
return
}
Logger.info(`[SocketAuthority] User ${client.user.username} initiated cover search ${requestId}`)
// Callback for streaming results to client
const onResult = (result) => {
socket.emit('cover_search_result', {
requestId,
provider: result.provider,
covers: result.covers,
total: result.total
})
}
// Callback when search completes
const onComplete = () => {
Logger.info(`[SocketAuthority] Cover search ${requestId} completed`)
socket.emit('cover_search_complete', { requestId })
}
// Callback for provider errors
const onError = (provider, errorMessage) => {
socket.emit('cover_search_provider_error', {
requestId,
provider,
error: errorMessage
})
}
// Start the search
CoverSearchManager.startSearch(requestId, { title, author, provider, podcast }, onResult, onComplete, onError).catch((error) => {
Logger.error(`[SocketAuthority] Cover search ${requestId} failed:`, error)
socket.emit('cover_search_error', {
requestId,
error: error.message
})
})
}
/**
* Handle cancel cover search request
* @param {SocketIO.Socket} socket
* @param {string} requestId
*/
handleCancelCoverSearch(socket, requestId) {
const client = this.clients[socket.id]
if (!client?.user) {
Logger.error('[SocketAuthority] Unauthorized cancel cover search request')
return
}
Logger.info(`[SocketAuthority] User ${client.user.username} cancelled cover search ${requestId}`)
const cancelled = CoverSearchManager.cancelSearch(requestId)
if (cancelled) {
socket.emit('cover_search_cancelled', { requestId })
}
}
/**
* Cancel all cover searches associated with a socket (called on disconnect)
* @param {string} socketId
*/
cancelSocketCoverSearches(socketId) {
// We don't track a socket-to-request mapping, so nothing is cancelled here;
// any still-active searches will simply time out and the client handles reconnection gracefully
Logger.debug(`[SocketAuthority] Socket ${socketId} disconnected, any active searches will timeout`)
}
}
module.exports = new SocketAuthority()
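For illustration, a minimal client-side sketch of the event flow these handlers expect, assuming an already-authenticated socket.io-client connection; the event names and payload fields come from the handlers above, while the connection URL and requestId format are arbitrary.
const { io } = require('socket.io-client')
const socket = io('http://localhost:3333') // hypothetical address; auth handshake omitted
const requestId = `cover-search-${Date.now()}` // any unique id the client chooses
socket.emit('search_covers', { requestId, title: 'Project Hail Mary', author: 'Andy Weir', provider: 'all', podcast: false })
socket.on('cover_search_result', (data) => {
  // Emitted once per provider that returned covers
  console.log(`${data.provider}: ${data.covers.length} covers`)
})
socket.on('cover_search_provider_error', (data) => console.warn(`${data.provider} failed: ${data.error}`))
socket.on('cover_search_complete', () => console.log('cover search finished'))
socket.on('cover_search_error', (data) => console.error(data.error))
// Abort early, e.g. when the user closes the search modal
socket.emit('cancel_cover_search', requestId)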

View file

@ -527,7 +527,16 @@ class OidcAuthStrategy {
// For absolute URLs, ensure they point to the same origin
const callbackUrlObj = new URL(callbackUrl)
const currentProtocol = req.secure || req.get('x-forwarded-proto') === 'https' ? 'https' : 'http'
// Nginx Proxy Manager (NPM) sometimes appends both http and https to x-forwarded-proto, so we need to check for both
const xfp = (req.get('x-forwarded-proto') || '').toLowerCase()
const currentProtocol =
req.secure ||
xfp
.split(',')
.map((s) => s.trim())
.includes('https')
? 'https'
: 'http'
const currentHost = req.get('host')
// Check if protocol and host match exactly
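As a quick illustration (header value is hypothetical): a chained proxy that sets x-forwarded-proto to "https,http" now resolves to https, which the previous strict comparison missed.
const xfp = 'https,http' // e.g. Nginx Proxy Manager in front of a second proxy
const isHttps = xfp
  .toLowerCase()
  .split(',')
  .map((s) => s.trim())
  .includes('https')
// isHttps === true, so currentProtocol becomes 'https'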

View file

@ -12,9 +12,9 @@ class TokenManager {
constructor() {
/** @type {number} Refresh token expiry in seconds */
this.RefreshTokenExpiry = parseInt(process.env.REFRESH_TOKEN_EXPIRY) || 7 * 24 * 60 * 60 // 7 days
this.RefreshTokenExpiry = parseInt(process.env.REFRESH_TOKEN_EXPIRY) || 30 * 24 * 60 * 60 // 30 days
/** @type {number} Access token expiry in seconds */
this.AccessTokenExpiry = parseInt(process.env.ACCESS_TOKEN_EXPIRY) || 12 * 60 * 60 // 12 hours
this.AccessTokenExpiry = parseInt(process.env.ACCESS_TOKEN_EXPIRY) || 1 * 60 * 60 // 1 hour
if (parseInt(process.env.REFRESH_TOKEN_EXPIRY) > 0) {
Logger.info(`[TokenManager] Refresh token expiry set from ENV variable to ${this.RefreshTokenExpiry} seconds`)
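The env fallback works because parseInt of an unset or non-numeric value is NaN, which is falsy; a small sketch of the two cases (values are examples):
parseInt(undefined) || 30 * 24 * 60 * 60 // => 2592000 (the new 30-day default)
parseInt('604800') || 30 * 24 * 60 * 60 // => 604800 (REFRESH_TOKEN_EXPIRY=604800, i.e. the old 7 days)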

View file

@ -221,13 +221,11 @@ class LibraryController {
const includeArray = (req.query.include || '').split(',')
if (includeArray.includes('filterdata')) {
const filterdata = await libraryFilters.getFilterData(req.library.mediaType, req.library.id)
const customMetadataProviders = await Database.customMetadataProviderModel.getForClientByMediaType(req.library.mediaType)
return res.json({
filterdata,
issues: filterdata.numIssues,
numUserPlaylists: await Database.playlistModel.getNumPlaylistsForUserAndLibrary(req.user.id, req.library.id),
customMetadataProviders,
library: req.library.toOldJSON()
})
}

View file

@ -142,6 +142,9 @@ class MiscController {
Logger.warn('Cannot disable iframe when ALLOW_IFRAME is enabled in environment')
return res.status(400).send('Cannot disable iframe when ALLOW_IFRAME is enabled in environment')
}
if (settingsUpdate.allowedOrigins && !Array.isArray(settingsUpdate.allowedOrigins)) {
return res.status(400).send('allowedOrigins must be an array')
}
const madeUpdates = Database.serverSettings.update(settingsUpdate)
if (madeUpdates) {

View file

@ -4,7 +4,29 @@ const BookFinder = require('../finders/BookFinder')
const PodcastFinder = require('../finders/PodcastFinder')
const AuthorFinder = require('../finders/AuthorFinder')
const Database = require('../Database')
const { isValidASIN } = require('../utils')
const { isValidASIN, getQueryParamAsString, ValidationError, NotFoundError } = require('../utils')
// Provider name mappings for display purposes
const providerMap = {
all: 'All',
best: 'Best',
google: 'Google Books',
itunes: 'iTunes',
openlibrary: 'Open Library',
fantlab: 'FantLab.ru',
audiobookcovers: 'AudiobookCovers.com',
audible: 'Audible.com',
'audible.ca': 'Audible.ca',
'audible.uk': 'Audible.co.uk',
'audible.au': 'Audible.com.au',
'audible.fr': 'Audible.fr',
'audible.de': 'Audible.de',
'audible.jp': 'Audible.co.jp',
'audible.it': 'Audible.it',
'audible.in': 'Audible.in',
'audible.es': 'Audible.es',
audnexus: 'Audnexus'
}
/**
* @typedef RequestUserObject
@ -16,6 +38,44 @@ const { isValidASIN } = require('../utils')
class SearchController {
constructor() {}
/**
* Fetches a library item by ID
* @param {string} id - Library item ID
* @returns {Promise<import('../models/LibraryItem').LibraryItemExpanded>}
*/
static async fetchLibraryItem(id) {
const libraryItem = await Database.libraryItemModel.getExpandedById(id)
if (!libraryItem) {
throw new NotFoundError(`library item "${id}" not found`)
}
return libraryItem
}
/**
* Maps custom metadata providers to standardized format
* @param {Array} providers - Array of custom provider objects
* @returns {Array<{value: string, text: string}>}
*/
static mapCustomProviders(providers) {
return providers.map((provider) => ({
value: provider.getSlug(),
text: provider.name
}))
}
/**
* Static helper method to format provider for client (for use in array methods)
* @param {string} providerValue - Provider identifier
* @returns {{value: string, text: string}}
*/
static formatProvider(providerValue) {
return {
value: providerValue,
text: providerMap[providerValue] || providerValue
}
}
/**
* GET: /api/search/books
*
@ -23,19 +83,25 @@ class SearchController {
* @param {Response} res
*/
async findBooks(req, res) {
const id = req.query.id
const libraryItem = await Database.libraryItemModel.getExpandedById(id)
const provider = req.query.provider || 'google'
const title = req.query.title || ''
const author = req.query.author || ''
try {
const query = req.query
const provider = getQueryParamAsString(query, 'provider', 'google')
const title = getQueryParamAsString(query, 'title', '')
const author = getQueryParamAsString(query, 'author', '')
const id = getQueryParamAsString(query, 'id', undefined)
if (typeof provider !== 'string' || typeof title !== 'string' || typeof author !== 'string') {
Logger.error(`[SearchController] findBooks: Invalid request query params`)
return res.status(400).send('Invalid request query params')
// Fetch library item
const libraryItem = id ? await SearchController.fetchLibraryItem(id) : null
const results = await BookFinder.search(libraryItem, provider, title, author)
res.json(results)
} catch (error) {
Logger.error(`[SearchController] findBooks: ${error.message}`)
if (error instanceof ValidationError || error instanceof NotFoundError) {
return res.status(error.status).json({ error: error.message })
}
return res.status(500).json({ error: 'Internal server error' })
}
const results = await BookFinder.search(libraryItem, provider, title, author)
res.json(results)
}
/**
@ -45,20 +111,24 @@ class SearchController {
* @param {Response} res
*/
async findCovers(req, res) {
const query = req.query
const podcast = query.podcast == 1
try {
const query = req.query
const podcast = query.podcast === '1' || query.podcast === 1
const title = getQueryParamAsString(query, 'title', '', true)
const author = getQueryParamAsString(query, 'author', '')
const provider = getQueryParamAsString(query, 'provider', 'google')
if (!query.title || typeof query.title !== 'string') {
Logger.error(`[SearchController] findCovers: Invalid title sent in query`)
return res.sendStatus(400)
let results = null
if (podcast) results = await PodcastFinder.findCovers(title)
else results = await BookFinder.findCovers(provider, title, author)
res.json({ results })
} catch (error) {
Logger.error(`[SearchController] findCovers: ${error.message}`)
if (error instanceof ValidationError) {
return res.status(error.status).json({ error: error.message })
}
return res.status(500).json({ error: 'Internal server error' })
}
let results = null
if (podcast) results = await PodcastFinder.findCovers(query.title)
else results = await BookFinder.findCovers(query.provider || 'google', query.title, query.author || '')
res.json({
results
})
}
/**
@ -69,34 +139,42 @@ class SearchController {
* @param {Response} res
*/
async findPodcasts(req, res) {
const term = req.query.term
const country = req.query.country || 'us'
if (!term) {
Logger.error('[SearchController] Invalid request query param "term" is required')
return res.status(400).send('Invalid request query param "term" is required')
}
try {
const query = req.query
const term = getQueryParamAsString(query, 'term', '', true)
const country = getQueryParamAsString(query, 'country', 'us')
const results = await PodcastFinder.search(term, {
country
})
res.json(results)
const results = await PodcastFinder.search(term, { country })
res.json(results)
} catch (error) {
Logger.error(`[SearchController] findPodcasts: ${error.message}`)
if (error instanceof ValidationError) {
return res.status(error.status).json({ error: error.message })
}
return res.status(500).json({ error: 'Internal server error' })
}
}
/**
* GET: /api/search/authors
* Note: This endpoint is not currently used in the web client.
*
* @param {RequestWithUser} req
* @param {Response} res
*/
async findAuthor(req, res) {
const query = req.query.q
if (!query || typeof query !== 'string') {
Logger.error(`[SearchController] findAuthor: Invalid query param`)
return res.status(400).send('Invalid query param')
}
try {
const query = getQueryParamAsString(req.query, 'q', '', true)
const author = await AuthorFinder.findAuthorByName(query)
res.json(author)
const author = await AuthorFinder.findAuthorByName(query)
res.json(author)
} catch (error) {
Logger.error(`[SearchController] findAuthor: ${error.message}`)
if (error instanceof ValidationError) {
return res.status(error.status).json({ error: error.message })
}
return res.status(500).json({ error: 'Internal server error' })
}
}
/**
@ -106,16 +184,55 @@ class SearchController {
* @param {Response} res
*/
async findChapters(req, res) {
const asin = req.query.asin
if (!isValidASIN(asin.toUpperCase())) {
return res.json({ error: 'Invalid ASIN', stringKey: 'MessageInvalidAsin' })
try {
const query = req.query
const asin = getQueryParamAsString(query, 'asin', '', true)
const region = getQueryParamAsString(query, 'region', 'us').toLowerCase()
if (!isValidASIN(asin.toUpperCase())) throw new ValidationError('asin', 'is invalid')
const chapterData = await BookFinder.findChapters(asin, region)
if (!chapterData) {
return res.json({ error: 'Chapters not found', stringKey: 'MessageChaptersNotFound' })
}
res.json(chapterData)
} catch (error) {
Logger.error(`[SearchController] findChapters: ${error.message}`)
if (error instanceof ValidationError) {
if (error.paramName === 'asin') {
return res.json({ error: 'Invalid ASIN', stringKey: 'MessageInvalidAsin' })
}
if (error.paramName === 'region') {
return res.json({ error: 'Invalid region', stringKey: 'MessageInvalidRegion' })
}
}
return res.status(500).json({ error: 'Internal server error' })
}
const region = (req.query.region || 'us').toLowerCase()
const chapterData = await BookFinder.findChapters(asin, region)
if (!chapterData) {
return res.json({ error: 'Chapters not found', stringKey: 'MessageChaptersNotFound' })
}
/**
* GET: /api/search/providers
* Get all available metadata providers
*
* @param {RequestWithUser} req
* @param {Response} res
*/
async getAllProviders(req, res) {
const customProviders = await Database.customMetadataProviderModel.findAll()
const customBookProviders = customProviders.filter((p) => p.mediaType === 'book')
const customPodcastProviders = customProviders.filter((p) => p.mediaType === 'podcast')
const bookProviders = BookFinder.providers.filter((p) => p !== 'audiobookcovers')
// Build minimized payload with custom providers merged in
const providers = {
books: [...bookProviders.map((p) => SearchController.formatProvider(p)), ...SearchController.mapCustomProviders(customBookProviders)],
booksCovers: [SearchController.formatProvider('best'), ...BookFinder.providers.map((p) => SearchController.formatProvider(p)), ...SearchController.mapCustomProviders(customBookProviders), SearchController.formatProvider('all')],
podcasts: [SearchController.formatProvider('itunes'), ...SearchController.mapCustomProviders(customPodcastProviders)]
}
res.json(chapterData)
res.json({ providers })
}
}
module.exports = new SearchController()
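For reference, a hedged sketch of the response shape when no custom metadata providers are configured; the display names come from providerMap above and the book list order follows BookFinder.providers:
// GET /api/search/providers =>
// {
//   providers: {
//     books: [{ value: 'google', text: 'Google Books' } /* ...all BookFinder providers except audiobookcovers */],
//     booksCovers: [{ value: 'best', text: 'Best' }, /* ...every BookFinder provider... */ { value: 'all', text: 'All' }],
//     podcasts: [{ value: 'itunes', text: 'iTunes' }]
//   }
// }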

View file

@ -339,9 +339,9 @@ class SessionController {
var playbackSession = this.playbackSessionManager.getSession(req.params.id)
if (!playbackSession) return res.sendStatus(404)
if (playbackSession.userId !== req.user.id) {
Logger.error(`[SessionController] User "${req.user.username}" attempting to access session belonging to another user "${req.params.id}"`)
return res.sendStatus(404)
if (playbackSession.userId !== req.user.id && !req.user.isAdminOrUp) {
Logger.error(`[SessionController] Non-admin user "${req.user.username}" attempting to access session belonging to another user "${req.params.id}"`)
return res.sendStatus(403)
}
req.playbackSession = playbackSession

View file

@ -11,7 +11,7 @@ const { levenshteinDistance, levenshteinSimilarity, escapeRegExp, isValidASIN }
const htmlSanitizer = require('../utils/htmlSanitizer')
class BookFinder {
#providerResponseTimeout = 30000
#providerResponseTimeout = 10000
constructor() {
this.openLibrary = new OpenLibrary()
@ -385,6 +385,11 @@ class BookFinder {
if (!title) return books
// Truncate excessively long inputs to prevent ReDoS attacks
const MAX_INPUT_LENGTH = 500
title = title.substring(0, MAX_INPUT_LENGTH)
author = author?.substring(0, MAX_INPUT_LENGTH) || author
const isTitleAsin = isValidASIN(title.toUpperCase())
let actualTitleQuery = title
@ -402,7 +407,8 @@ class BookFinder {
let authorCandidates = new BookFinder.AuthorCandidates(cleanAuthor, this.audnexus)
// Remove underscores and parentheses with their contents, and replace with a separator
const cleanTitle = title.replace(/\[.*?\]|\(.*?\)|{.*?}|_/g, ' - ')
// Use negated character classes to prevent ReDoS vulnerability (input length validated at entry point)
const cleanTitle = title.replace(/\[[^\]]*\]|\([^)]*\)|{[^}]*}|_/g, ' - ')
// Split title into hyphen-separated parts
const titleParts = cleanTitle.split(/ - | -|- /)
for (const titlePart of titleParts) authorCandidates.add(titlePart)
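A standalone sketch of the same truncate-then-clean idea (the helper name is illustrative, not from the codebase):
const MAX_INPUT_LENGTH = 500
function cleanTitleForCandidates(title) {
  // Cap the input first so the regex below never runs on an unbounded string
  const truncated = title.substring(0, MAX_INPUT_LENGTH)
  // Negated character classes ([^\]]*, [^)]*, [^}]*) match without catastrophic backtracking
  return truncated.replace(/\[[^\]]*\]|\([^)]*\)|{[^}]*}|_/g, ' - ')
}
cleanTitleForCandidates("Ender's Game (Unabridged) [Read by the Author]")
// Parenthesized and bracketed segments (and underscores) become ' - ' separators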
@ -422,7 +428,7 @@ class BookFinder {
}
}
if (books.length) {
if (books.length && libraryItem) {
const isAudibleProvider = provider.startsWith('audible')
const libraryItemDurationMinutes = libraryItem?.media?.duration ? libraryItem.media.duration / 60 : null
@ -608,6 +614,14 @@ class BookFinder {
Logger.debug(`[BookFinder] Found ${providerResults.length} covers from ${providerString}`)
searchResults.push(...providerResults)
}
} else if (provider === 'best') {
// Best providers: google, fantlab, and audible.com
const bestProviders = ['google', 'fantlab', 'audible']
for (const providerString of bestProviders) {
const providerResults = await this.search(null, providerString, title, author, options)
Logger.debug(`[BookFinder] Found ${providerResults.length} covers from ${providerString}`)
searchResults.push(...providerResults)
}
} else {
searchResults = await this.search(null, provider, title, author, options)
}
@ -660,7 +674,9 @@ function cleanTitleForCompares(title, keepSubtitle = false) {
let stripped = keepSubtitle ? title : stripSubtitle(title)
// Remove text in parentheses (i.e. "Ender's Game (Ender's Saga)" becomes "Ender's Game")
let cleaned = stripped.replace(/ *\([^)]*\) */g, '')
// Use negated character class to prevent ReDoS vulnerability (input length validated at entry point)
let cleaned = stripped.replace(/\([^)]*\)/g, '') // Remove parenthetical content
cleaned = cleaned.replace(/\s+/g, ' ').trim() // Clean up any resulting multiple spaces
// Remove single quotes (i.e. "Ender's Game" becomes "Enders Game")
cleaned = cleaned.replace(/'/g, '')

View file

@ -7,9 +7,9 @@ class PodcastFinder {
}
/**
*
* @param {string} term
* @param {{country:string}} options
*
* @param {string} term
* @param {{country:string}} options
* @returns {Promise<import('../providers/iTunes').iTunesPodcastSearchResult[]>}
*/
async search(term, options = {}) {
@ -20,12 +20,16 @@ class PodcastFinder {
return results
}
/**
* @param {string} term
* @returns {Promise<string[]>}
*/
async findCovers(term) {
if (!term) return null
Logger.debug(`[iTunes] Searching for podcast covers with term "${term}"`)
var results = await this.iTunesApi.searchPodcasts(term)
const results = await this.iTunesApi.searchPodcasts(term)
if (!results) return []
return results.map(r => r.cover).filter(r => r)
return results.map((r) => r.cover).filter((r) => r)
}
}
module.exports = new PodcastFinder()
module.exports = new PodcastFinder()

View file

@ -48,9 +48,14 @@ class BackupManager {
}
async init() {
const backupsDirExists = await fs.pathExists(this.backupPath)
if (!backupsDirExists) {
await fs.ensureDir(this.backupPath)
try {
const backupsDirExists = await fs.pathExists(this.backupPath)
if (!backupsDirExists) {
await fs.ensureDir(this.backupPath)
}
} catch (error) {
Logger.error(`[BackupManager] Failed to ensure backup directory at "${this.backupPath}": ${error.message}`)
throw new Error(`[BackupManager] Failed to ensure backup directory at "${this.backupPath}"`, { cause: error })
}
await this.loadBackups()

View file

@ -24,10 +24,15 @@ class CacheManager {
this.ImageCachePath = Path.join(this.CachePath, 'images')
this.ItemCachePath = Path.join(this.CachePath, 'items')
await fs.ensureDir(this.CachePath)
await fs.ensureDir(this.CoverCachePath)
await fs.ensureDir(this.ImageCachePath)
await fs.ensureDir(this.ItemCachePath)
try {
await fs.ensureDir(this.CachePath)
await fs.ensureDir(this.CoverCachePath)
await fs.ensureDir(this.ImageCachePath)
await fs.ensureDir(this.ItemCachePath)
} catch (error) {
Logger.error(`[CacheManager] Failed to create cache directories at "${this.CachePath}": ${error.message}`)
throw new Error(`[CacheManager] Failed to create cache directories at "${this.CachePath}"`, { cause: error })
}
}
async handleCoverCache(res, libraryItemId, options = {}) {

View file

@ -0,0 +1,254 @@
const { setMaxListeners } = require('events')
const Logger = require('../Logger')
const BookFinder = require('../finders/BookFinder')
const PodcastFinder = require('../finders/PodcastFinder')
/**
* Manager for handling streaming cover search across multiple providers
*/
class CoverSearchManager {
constructor() {
/** @type {Map<string, AbortController>} Map of requestId to AbortController */
this.activeSearches = new Map()
// Default timeout for each provider search
this.providerTimeout = 10000 // 10 seconds
// Set to 0 to disable the max listeners limit
// We need one listener per provider (15+) and may have multiple concurrent searches
this.maxListeners = 0
}
/**
* Start a streaming cover search
* @param {string} requestId - Unique identifier for this search request
* @param {Object} searchParams - Search parameters
* @param {string} searchParams.title - Title to search for
* @param {string} searchParams.author - Author to search for (optional)
* @param {string} searchParams.provider - Provider to search (or 'all')
* @param {boolean} searchParams.podcast - Whether this is a podcast search
* @param {Function} onResult - Callback for each result chunk
* @param {Function} onComplete - Callback when search completes
* @param {Function} onError - Callback for errors
*/
async startSearch(requestId, searchParams, onResult, onComplete, onError) {
if (this.activeSearches.has(requestId)) {
Logger.warn(`[CoverSearchManager] Search with requestId ${requestId} already exists`)
return
}
const abortController = new AbortController()
// Increase max listeners on this signal to accommodate parallel provider searches
// AbortSignal is an EventTarget, so we use the events module's setMaxListeners
setMaxListeners(this.maxListeners, abortController.signal)
this.activeSearches.set(requestId, abortController)
Logger.info(`[CoverSearchManager] Starting search ${requestId} with params:`, searchParams)
try {
const { title, author, provider, podcast } = searchParams
if (podcast) {
await this.searchPodcastCovers(requestId, title, abortController.signal, onResult, onError)
} else {
await this.searchBookCovers(requestId, provider, title, author, abortController.signal, onResult, onError)
}
if (!abortController.signal.aborted) {
onComplete()
}
} catch (error) {
if (error.name === 'AbortError') {
Logger.info(`[CoverSearchManager] Search ${requestId} was cancelled`)
} else {
Logger.error(`[CoverSearchManager] Search ${requestId} failed:`, error)
onError(error.message)
}
} finally {
this.activeSearches.delete(requestId)
}
}
/**
* Cancel an active search
* @param {string} requestId - Request ID to cancel
*/
cancelSearch(requestId) {
const abortController = this.activeSearches.get(requestId)
if (abortController) {
Logger.info(`[CoverSearchManager] Cancelling search ${requestId}`)
abortController.abort()
this.activeSearches.delete(requestId)
return true
}
return false
}
/**
* Search for podcast covers
*/
async searchPodcastCovers(requestId, title, signal, onResult, onError) {
try {
const results = await this.executeWithTimeout(() => PodcastFinder.findCovers(title), this.providerTimeout, signal)
if (signal.aborted) return
const covers = this.extractCoversFromResults(results)
if (covers.length > 0) {
onResult({
provider: 'itunes',
covers,
total: covers.length
})
}
} catch (error) {
if (error.name !== 'AbortError') {
Logger.error(`[CoverSearchManager] Podcast search failed:`, error)
onError('itunes', error.message)
}
}
}
/**
* Search for book covers across providers
*/
async searchBookCovers(requestId, provider, title, author, signal, onResult, onError) {
let providers = []
if (provider === 'all') {
providers = [...BookFinder.providers]
} else if (provider === 'best') {
// Best providers: google, fantlab, and audible.com
providers = ['google', 'fantlab', 'audible']
} else {
providers = [provider]
}
Logger.debug(`[CoverSearchManager] Searching ${providers.length} providers in parallel`)
// Search all providers in parallel
const searchPromises = providers.map(async (providerName) => {
if (signal.aborted) return
try {
const searchResults = await this.executeWithTimeout(() => BookFinder.search(null, providerName, title, author || ''), this.providerTimeout, signal)
if (signal.aborted) return
const covers = this.extractCoversFromResults(searchResults)
Logger.debug(`[CoverSearchManager] Found ${covers.length} covers from ${providerName}`)
if (covers.length > 0) {
onResult({
provider: providerName,
covers,
total: covers.length
})
}
} catch (error) {
if (error.name !== 'AbortError') {
Logger.warn(`[CoverSearchManager] Provider ${providerName} failed:`, error.message)
onError(providerName, error.message)
}
}
})
await Promise.allSettled(searchPromises)
}
/**
* Execute a promise with timeout and abort signal
*/
async executeWithTimeout(fn, timeout, signal) {
return new Promise(async (resolve, reject) => {
let abortHandler = null
let timeoutId = null
// Cleanup function to ensure we always remove listeners
const cleanup = () => {
if (timeoutId) {
clearTimeout(timeoutId)
timeoutId = null
}
if (abortHandler) {
signal.removeEventListener('abort', abortHandler)
abortHandler = null
}
}
// Set up timeout
timeoutId = setTimeout(() => {
cleanup()
const error = new Error('Provider timeout')
error.name = 'TimeoutError'
reject(error)
}, timeout)
// Check if already aborted
if (signal.aborted) {
cleanup()
const error = new Error('Search cancelled')
error.name = 'AbortError'
reject(error)
return
}
// Set up abort handler
abortHandler = () => {
cleanup()
const error = new Error('Search cancelled')
error.name = 'AbortError'
reject(error)
}
signal.addEventListener('abort', abortHandler)
try {
const result = await fn()
cleanup()
resolve(result)
} catch (error) {
cleanup()
reject(error)
}
})
}
/**
* Extract cover URLs from search results
*/
extractCoversFromResults(results) {
const covers = []
if (!Array.isArray(results)) return covers
results.forEach((result) => {
if (typeof result === 'string') {
covers.push(result)
}
if (result.covers && Array.isArray(result.covers)) {
covers.push(...result.covers)
}
if (result.cover) {
covers.push(result.cover)
}
})
// Remove duplicates
return [...new Set(covers)]
}
/**
* Cancel all active searches (cleanup on server shutdown)
*/
cancelAllSearches() {
Logger.info(`[CoverSearchManager] Cancelling ${this.activeSearches.size} active searches`)
for (const [requestId, abortController] of this.activeSearches.entries()) {
abortController.abort()
}
this.activeSearches.clear()
}
}
module.exports = new CoverSearchManager()
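A rough server-side usage sketch (not part of the diff); the argument and callback shapes mirror the JSDoc on startSearch above:
const CoverSearchManager = require('./managers/CoverSearchManager')
// From any async context, e.g. the SocketAuthority handler shown earlier:
await CoverSearchManager.startSearch(
  'req-1', // caller-supplied unique id; a second call with the same id is ignored while active
  { title: 'Dune', author: 'Frank Herbert', provider: 'best', podcast: false },
  (result) => console.log(result.provider, result.covers.length), // onResult, once per provider with covers
  () => console.log('search complete'), // onComplete
  (provider, message) => console.warn(`${provider}: ${message}`) // onError, per failing provider
)
// Cancel from elsewhere (e.g. a cancel_cover_search socket event):
CoverSearchManager.cancelSearch('req-1')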

View file

@ -37,8 +37,13 @@ class LogManager {
}
async ensureLogDirs() {
await fs.ensureDir(this.DailyLogPath)
await fs.ensureDir(this.ScanLogPath)
try {
await fs.ensureDir(this.DailyLogPath)
await fs.ensureDir(this.ScanLogPath)
} catch (error) {
console.error(`[LogManager] Failed to create log directories at "${this.DailyLogPath}": ${error.message}`)
throw new Error(`[LogManager] Failed to create log directories at "${this.DailyLogPath}"`, { cause: error })
}
}
/**
@ -169,7 +174,7 @@ class LogManager {
/**
* Most recent 5000 daily logs
*
* @returns {LogObject[]}
* @returns {string}
*/
getMostRecentCurrentDailyLogs() {
return this.currentDailyLog?.logs.slice(-5000) || []

View file

@ -38,7 +38,12 @@ class MigrationManager {
if (!(await fs.pathExists(this.configPath))) throw new Error(`Config path does not exist: ${this.configPath}`)
this.migrationsDir = path.join(this.configPath, 'migrations')
await fs.ensureDir(this.migrationsDir)
try {
await fs.ensureDir(this.migrationsDir)
} catch (error) {
Logger.error(`[MigrationManager] Failed to create migrations directory at "${this.migrationsDir}": ${error.message}`)
throw new Error(`[MigrationManager] Failed to create migrations directory at "${this.migrationsDir}"`, { cause: error })
}
this.serverVersion = this.extractVersionFromTag(serverVersion)
if (!this.serverVersion) throw new Error(`Invalid server version: ${serverVersion}. Expected a version tag like v1.2.3.`)

View file

@ -459,7 +459,12 @@ class PlaybackSessionManager {
* Remove all stream folders in `/metadata/streams`
*/
async removeOrphanStreams() {
await fs.ensureDir(this.StreamsPath)
try {
await fs.ensureDir(this.StreamsPath)
} catch (error) {
Logger.error(`[PlaybackSessionManager] Failed to create streams directory at "${this.StreamsPath}": ${error.message}`)
throw new Error(`[PlaybackSessionManager] Failed to create streams directory at "${this.StreamsPath}"`, { cause: error })
}
try {
const streamsInPath = await fs.readdir(this.StreamsPath)
for (const streamId of streamsInPath) {

View file

@ -127,10 +127,20 @@ class PodcastManager {
})
let success = !!ffmpegDownloadResponse?.success
// If failed due to ffmpeg error, retry without tagging
if (success) {
// Attempt to ffprobe and add podcast episode audio file
success = await this.scanAddPodcastEpisodeAudioFile()
if (!success) {
Logger.error(`[PodcastManager] Failed to scan and add podcast episode audio file - removing file`)
await fs.remove(this.currentDownload.targetPath)
}
}
// If failed due to ffmpeg or ffprobe error, retry without tagging
// e.g. RSS feed may have incorrect file extension and file type
// See https://github.com/advplyr/audiobookshelf/issues/3837
if (!success && ffmpegDownloadResponse?.isFfmpegError) {
// e.g. ffmpeg may download the file without any streams, causing the ffprobe to fail
if (!success && !ffmpegDownloadResponse?.isRequestError) {
Logger.info(`[PodcastManager] Retrying episode download without tagging`)
// Download episode only
success = await downloadFile(this.currentDownload.url, this.currentDownload.targetPath)
@ -139,23 +149,20 @@ class PodcastManager {
Logger.error(`[PodcastManager] Podcast Episode download failed`, error)
return false
})
if (success) {
success = await this.scanAddPodcastEpisodeAudioFile()
if (!success) {
Logger.error(`[PodcastManager] Failed to scan and add podcast episode audio file - removing file`)
await fs.remove(this.currentDownload.targetPath)
}
}
}
if (success) {
success = await this.scanAddPodcastEpisodeAudioFile()
if (!success) {
await fs.remove(this.currentDownload.targetPath)
this.currentDownload.setFinished(false)
const taskFailedString = {
text: 'Failed',
key: 'MessageTaskFailed'
}
task.setFailed(taskFailedString)
} else {
Logger.info(`[PodcastManager] Successfully downloaded podcast episode "${this.currentDownload.episodeTitle}"`)
this.currentDownload.setFinished(true)
task.setFinished()
}
Logger.info(`[PodcastManager] Successfully downloaded podcast episode "${this.currentDownload.episodeTitle}"`)
this.currentDownload.setFinished(true)
task.setFinished()
} else {
const taskFailedString = {
text: 'Failed',

View file

@ -73,7 +73,7 @@ class Stream extends EventEmitter {
return [AudioMimeType.FLAC, AudioMimeType.OPUS, AudioMimeType.WMA, AudioMimeType.AIFF, AudioMimeType.WEBM, AudioMimeType.WEBMA, AudioMimeType.AWB, AudioMimeType.CAF]
}
get codecsToForceAAC() {
return ['alac']
return ['alac', 'ac3', 'eac3']
}
get userToken() {
return this.user.token
@ -273,7 +273,16 @@ class Stream extends EventEmitter {
audioCodec = 'aac'
}
this.ffmpeg.addOption([`-loglevel ${logLevel}`, '-map 0:a', `-c:a ${audioCodec}`])
const codecOptions = [`-loglevel ${logLevel}`, '-map 0:a']
if (['ac3', 'eac3'].includes(this.tracksCodec) && this.tracks.length > 0 && this.tracks[0].bitRate && this.tracks[0].channels) {
// For ac3/eac3 the bitrate and channel count must be passed explicitly to avoid ffmpeg errors
codecOptions.push(`-c:a ${audioCodec}`, `-b:a ${this.tracks[0].bitRate}`, `-ac ${this.tracks[0].channels}`)
} else {
codecOptions.push(`-c:a ${audioCodec}`)
}
this.ffmpeg.addOption(codecOptions)
const hlsOptions = ['-f hls', '-copyts', '-avoid_negative_ts make_non_negative', '-max_delay 5000000', '-max_muxing_queue_size 2048', `-hls_time 6`, `-hls_segment_type ${this.hlsSegmentType}`, `-start_number ${this.segmentStartNumber}`, '-hls_playlist_type vod', '-hls_list_size 0', '-hls_allow_cache 0']
this.ffmpeg.addOption(hlsOptions)
if (this.hlsSegmentType === 'fmp4') {

View file

@ -3,7 +3,7 @@ const Logger = require('../Logger')
const { isValidASIN } = require('../utils/index')
class Audible {
#responseTimeout = 30000
#responseTimeout = 10000
constructor() {
this.regionMap = {
@ -106,7 +106,7 @@ class Audible {
return res.data
})
.catch((error) => {
Logger.error('[Audible] ASIN search error', error)
Logger.error('[Audible] ASIN search error', error.message)
return null
})
}
@ -158,7 +158,7 @@ class Audible {
return Promise.all(res.data.products.map((result) => this.asinSearch(result.asin, region, timeout)))
})
.catch((error) => {
Logger.error('[Audible] query search error', error)
Logger.error('[Audible] query search error', error.message)
return []
})
}

View file

@ -2,7 +2,7 @@ const axios = require('axios')
const Logger = require('../Logger')
class AudiobookCovers {
#responseTimeout = 30000
#responseTimeout = 10000
constructor() {}
@ -24,7 +24,7 @@ class AudiobookCovers {
})
.then((res) => res?.data || [])
.catch((error) => {
Logger.error('[AudiobookCovers] Cover search error', error)
Logger.error('[AudiobookCovers] Cover search error', error.message)
return []
})
return items.map((item) => ({ cover: item.versions.png.original }))

View file

@ -55,7 +55,7 @@ class Audnexus {
return this._processRequest(this.limiter(() => axios.get(authorRequestUrl)))
.then((res) => res.data || [])
.catch((error) => {
Logger.error(`[Audnexus] Author ASIN request failed for ${name}`, error)
Logger.error(`[Audnexus] Author ASIN request failed for ${name}`, error.message)
return []
})
}
@ -82,7 +82,7 @@ class Audnexus {
return this._processRequest(this.limiter(() => axios.get(authorRequestUrl.toString())))
.then((res) => res.data)
.catch((error) => {
Logger.error(`[Audnexus] Author request failed for ${asin}`, error)
Logger.error(`[Audnexus] Author request failed for ${asin}`, error.message)
return null
})
}
@ -158,7 +158,7 @@ class Audnexus {
return this._processRequest(this.limiter(() => axios.get(chaptersRequestUrl.toString())))
.then((res) => res.data)
.catch((error) => {
Logger.error(`[Audnexus] Chapter ASIN request failed for ${asin}/${region}`, error)
Logger.error(`[Audnexus] Chapter ASIN request failed for ${asin}/${region}`, error.message)
return null
})
}

View file

@ -4,7 +4,7 @@ const Logger = require('../Logger')
const htmlSanitizer = require('../utils/htmlSanitizer')
class CustomProviderAdapter {
#responseTimeout = 30000
#responseTimeout = 10000
constructor() {}
@ -61,7 +61,7 @@ class CustomProviderAdapter {
return res.data.matches
})
.catch((error) => {
Logger.error('[CustomMetadataProvider] Search error', error)
Logger.error('[CustomMetadataProvider] Search error', error.message)
return []
})

View file

@ -2,7 +2,7 @@ const axios = require('axios')
const Logger = require('../Logger')
class FantLab {
#responseTimeout = 30000
#responseTimeout = 10000
// 7 - other
// 11 - essay
// 12 - article
@ -48,7 +48,7 @@ class FantLab {
return res.data || []
})
.catch((error) => {
Logger.error('[FantLab] search error', error)
Logger.error('[FantLab] search error', error.message)
return []
})
@ -77,7 +77,7 @@ class FantLab {
return resp.data || null
})
.catch((error) => {
Logger.error(`[FantLab] work info request for url "${url}" error`, error)
Logger.error(`[FantLab] work info request for url "${url}" error`, error.message)
return null
})
@ -193,7 +193,7 @@ class FantLab {
return resp.data || null
})
.catch((error) => {
Logger.error(`[FantLab] search cover from edition with url "${url}" error`, error)
Logger.error(`[FantLab] search cover from edition with url "${url}" error`, error.message)
return null
})

View file

@ -2,7 +2,7 @@ const axios = require('axios')
const Logger = require('../Logger')
class GoogleBooks {
#responseTimeout = 30000
#responseTimeout = 10000
constructor() {}
@ -67,7 +67,7 @@ class GoogleBooks {
return res.data.items
})
.catch((error) => {
Logger.error('[GoogleBooks] Volume search error', error)
Logger.error('[GoogleBooks] Volume search error', error.message)
return []
})
return items.map((item) => this.cleanResult(item))

View file

@ -1,7 +1,7 @@
const axios = require('axios').default
class OpenLibrary {
#responseTimeout = 30000
#responseTimeout = 10000
constructor() {
this.baseUrl = 'https://openlibrary.org'
@ -23,7 +23,7 @@ class OpenLibrary {
return res.data
})
.catch((error) => {
console.error('Failed', error)
console.error('Failed', error.message)
return null
})
}

View file

@ -28,7 +28,7 @@ const htmlSanitizer = require('../utils/htmlSanitizer')
*/
class iTunes {
#responseTimeout = 30000
#responseTimeout = 10000
constructor() {}
@ -63,7 +63,7 @@ class iTunes {
return response.data.results || []
})
.catch((error) => {
Logger.error(`[iTunes] search request error`, error)
Logger.error(`[iTunes] search request error`, error.message)
return []
})
}

View file

@ -283,6 +283,7 @@ class ApiRouter {
this.router.get('/search/podcast', SearchController.findPodcasts.bind(this))
this.router.get('/search/authors', SearchController.findAuthor.bind(this))
this.router.get('/search/chapters', SearchController.findChapters.bind(this))
this.router.get('/search/providers', SearchController.getAllProviders.bind(this))
//
// Cache Routes (Admin and up)

View file

@ -99,7 +99,7 @@ module.exports.resizeImage = resizeImage
/**
*
* @param {import('../objects/PodcastEpisodeDownload')} podcastEpisodeDownload
* @returns {Promise<{success: boolean, isFfmpegError?: boolean}>}
* @returns {Promise<{success: boolean, isRequestError?: boolean}>}
*/
module.exports.downloadPodcastEpisode = (podcastEpisodeDownload) => {
return new Promise(async (resolve) => {
@ -118,7 +118,7 @@ module.exports.downloadPodcastEpisode = (podcastEpisodeDownload) => {
method: 'GET',
responseType: 'stream',
headers: {
'Accept': '*/*',
Accept: '*/*',
'User-Agent': userAgent
},
timeout: global.PodcastDownloadTimeout
@ -139,7 +139,8 @@ module.exports.downloadPodcastEpisode = (podcastEpisodeDownload) => {
if (!response) {
return resolve({
success: false
success: false,
isRequestError: true
})
}
@ -204,8 +205,7 @@ module.exports.downloadPodcastEpisode = (podcastEpisodeDownload) => {
Logger.error(`Full stderr dump for episode url "${podcastEpisodeDownload.url}": ${stderrLines.join('\n')}`)
}
resolve({
success: false,
isFfmpegError: true
success: false
})
})
ffmpeg.on('progress', (progress) => {

View file

@ -277,3 +277,57 @@ module.exports.timestampToSeconds = (timestamp) => {
}
return null
}
class ValidationError extends Error {
constructor(paramName, message, status = 400) {
super(`Query parameter "${paramName}" ${message}`)
this.name = 'ValidationError'
this.paramName = paramName
this.status = status
}
}
module.exports.ValidationError = ValidationError
class NotFoundError extends Error {
constructor(message, status = 404) {
super(message)
this.name = 'NotFoundError'
this.status = status
}
}
module.exports.NotFoundError = NotFoundError
/**
* Safely extracts a query parameter as a string, rejecting arrays to prevent type confusion
* Express query parameters can be arrays if the same parameter appears multiple times
* @example ?author=Smith => "Smith"
* @example ?author=Smith&author=Jones => throws error
*
* @param {Object} query - Query object
* @param {string} paramName - Parameter name
* @param {string} defaultValue - Default value if undefined/null
* @param {boolean} required - Whether the parameter is required
* @param {number} maxLength - Optional maximum length (defaults to 1000 to prevent ReDoS attacks)
* @returns {string} String value
* @throws {ValidationError} If value is an array
* @throws {ValidationError} If value is too long
* @throws {ValidationError} If value is required but not provided
*/
module.exports.getQueryParamAsString = (query, paramName, defaultValue = '', required = false, maxLength = 1000) => {
const value = query[paramName]
if (value === undefined || value === null) {
if (required) {
throw new ValidationError(paramName, 'is required')
}
return defaultValue
}
// Explicitly reject arrays to prevent type confusion
if (Array.isArray(value)) {
throw new ValidationError(paramName, 'is an array')
}
// Reject excessively long strings to prevent ReDoS attacks
if (typeof value === 'string' && value.length > maxLength) {
throw new ValidationError(paramName, 'is too long')
}
return String(value)
}
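A short usage sketch mirroring how the controllers above consume these helpers (the query object stands in for req.query):
const { getQueryParamAsString, ValidationError } = require('./utils') // adjust the path to the caller's location
// Express parses ?title=Dune&author=Smith&author=Jones into:
const query = { title: 'Dune', author: ['Smith', 'Jones'] }
getQueryParamAsString(query, 'title', '', true) // => 'Dune'
getQueryParamAsString(query, 'provider', 'google') // => 'google' (default applied)
try {
  getQueryParamAsString(query, 'author') // repeated params arrive as an array and are rejected
} catch (error) {
  if (error instanceof ValidationError) console.log(error.status, error.message) // 400 Query parameter "author" is an array
}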

View file

@ -171,7 +171,7 @@ function extractEpisodeData(item) {
// Full description with html
if (item['content:encoded']) {
const rawDescription = (extractFirstArrayItem(item, 'content:encoded') || '').trim()
const rawDescription = (extractFirstArrayItemString(item, 'content:encoded') || '').trim()
episode.description = htmlSanitizer.sanitize(rawDescription)
}