Mirror of https://github.com/advplyr/audiobookshelf.git

Merge branch 'master' into Fuzzy-Matching-Continued
Commit 786df450e5
34 changed files with 489 additions and 346 deletions
@@ -92,7 +92,7 @@ class Logger {
    * @param {...any} args
    */
   dev(...args) {
-    if (!this.isDev) return
+    if (!this.isDev || process.env.HIDE_DEV_LOGS === '1') return
     console.log(`[${this.timestamp}] DEV:`, ...args)
   }
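A note on the Logger hunk above: dev-level output can now be silenced even when dev mode is on by setting the HIDE_DEV_LOGS environment variable to '1'. A standalone sketch of the guard (illustrative only, not the actual Logger class):

    // Illustrative sketch of the new guard in Logger.dev
    const isDev = true

    function dev(...args) {
      // Dev logs are skipped when not in dev mode or when HIDE_DEV_LOGS=1
      if (!isDev || process.env.HIDE_DEV_LOGS === '1') return
      console.log(`[${new Date().toISOString()}] DEV:`, ...args)
    }

    dev('printed only when HIDE_DEV_LOGS is not "1"')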
@@ -28,6 +28,8 @@ class FolderWatcher extends EventEmitter {
     this.ignoreDirs = []
     /** @type {string[]} */
     this.pendingDirsToRemoveFromIgnore = []
+    /** @type {NodeJS.Timeout} */
+    this.removeFromIgnoreTimer = null
 
     this.disabled = false
   }
@@ -240,9 +242,12 @@ class FolderWatcher extends EventEmitter {
    */
   addIgnoreDir(path) {
     path = this.cleanDirPath(path)
-    if (this.ignoreDirs.includes(path)) return
     this.pendingDirsToRemoveFromIgnore = this.pendingDirsToRemoveFromIgnore.filter(p => p !== path)
-    Logger.debug(`[Watcher] Ignoring directory "${path}"`)
+    if (this.ignoreDirs.includes(path)) {
+      // Already ignoring dir
+      return
+    }
+    Logger.debug(`[Watcher] addIgnoreDir: Ignoring directory "${path}"`)
     this.ignoreDirs.push(path)
   }
@@ -255,18 +260,24 @@ class FolderWatcher extends EventEmitter {
    */
   removeIgnoreDir(path) {
     path = this.cleanDirPath(path)
-    if (!this.ignoreDirs.includes(path) || this.pendingDirsToRemoveFromIgnore.includes(path)) return
+    if (!this.ignoreDirs.includes(path)) {
+      Logger.debug(`[Watcher] removeIgnoreDir: Path is not being ignored "${path}"`)
+      return
+    }
 
     // Add a 5 second delay before removing the ignore from this dir
-    this.pendingDirsToRemoveFromIgnore.push(path)
-    setTimeout(() => {
+    if (!this.pendingDirsToRemoveFromIgnore.includes(path)) {
+      this.pendingDirsToRemoveFromIgnore.push(path)
+    }
+    clearTimeout(this.removeFromIgnoreTimer)
+    this.removeFromIgnoreTimer = setTimeout(() => {
       if (this.pendingDirsToRemoveFromIgnore.includes(path)) {
         this.pendingDirsToRemoveFromIgnore = this.pendingDirsToRemoveFromIgnore.filter(p => p !== path)
-        Logger.debug(`[Watcher] No longer ignoring directory "${path}"`)
+        Logger.debug(`[Watcher] removeIgnoreDir: No longer ignoring directory "${path}"`)
         this.ignoreDirs = this.ignoreDirs.filter(p => p !== path)
       }
     }, 5000)
   }
 }
 module.exports = FolderWatcher
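Taken together, the three Watcher hunks above introduce removeFromIgnoreTimer and keep a directory on the ignore list for roughly five more seconds after a scan asks for it to be released, so trailing filesystem events from that scan do not immediately retrigger the watcher. A stripped-down sketch of the pattern (the directory path is made up for illustration; the real logic lives in FolderWatcher as shown above):

    // Simplified sketch of the delayed un-ignore pattern (single directory)
    let ignoreDirs = ['/audiobooks/new-book']
    let pendingDirsToRemoveFromIgnore = []
    let removeFromIgnoreTimer = null

    function removeIgnoreDir(path) {
      if (!ignoreDirs.includes(path)) return
      if (!pendingDirsToRemoveFromIgnore.includes(path)) pendingDirsToRemoveFromIgnore.push(path)

      // Keep ignoring for 5 more seconds so late file events are still dropped
      clearTimeout(removeFromIgnoreTimer)
      removeFromIgnoreTimer = setTimeout(() => {
        pendingDirsToRemoveFromIgnore = pendingDirsToRemoveFromIgnore.filter((p) => p !== path)
        ignoreDirs = ignoreDirs.filter((p) => p !== path)
        console.log('no longer ignoring', path)
      }, 5000)
    }

    removeIgnoreDir('/audiobooks/new-book')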
@@ -9,7 +9,8 @@ const libraryItemsBookFilters = require('../utils/queries/libraryItemsBookFilter
 const libraryItemFilters = require('../utils/queries/libraryItemFilters')
 const seriesFilters = require('../utils/queries/seriesFilters')
 const fileUtils = require('../utils/fileUtils')
-const { sort, createNewSortInstance } = require('../libs/fastSort')
+const { asciiOnlyToLowerCase } = require('../utils/index')
+const { createNewSortInstance } = require('../libs/fastSort')
 const naturalSort = createNewSortInstance({
   comparer: new Intl.Collator(undefined, { numeric: true, sensitivity: 'base' }).compare
 })
@@ -555,7 +556,7 @@ class LibraryController {
       return res.status(400).send('No query string')
     }
     const limit = req.query.limit && !isNaN(req.query.limit) ? Number(req.query.limit) : 12
-    const query = req.query.q.trim().toLowerCase()
+    const query = asciiOnlyToLowerCase(req.query.q.trim())
 
     const matches = await libraryItemFilters.search(req.user, req.library, query, limit)
     res.json(matches)
@@ -259,7 +259,6 @@ class LibraryItemController {
 
     // Check if library item media has a cover path
     if (!libraryItem.media.coverPath || !await fs.pathExists(libraryItem.media.coverPath)) {
-      Logger.debug(`[LibraryItemController] getCover: Library item "${req.params.id}" has no cover path`)
       return res.sendStatus(404)
     }
 
@@ -280,12 +279,6 @@ class LibraryItemController {
     return CacheManager.handleCoverCache(res, libraryItem.id, libraryItem.media.coverPath, options)
   }
 
-  // GET: api/items/:id/stream
-  openStream(req, res) {
-    // this.streamManager.openStreamApiRequest(res, req.user, req.libraryItem)
-    res.sendStatus(500)
-  }
-
   // POST: api/items/:id/play
   startPlaybackSession(req, res) {
     if (!req.libraryItem.media.numTracks && req.libraryItem.mediaType !== 'video') {
@@ -196,7 +196,7 @@ class MeController {
 
       const libraryItem = await Database.libraryItemModel.getOldById(localProgress.libraryItemId)
       if (!libraryItem) {
-        Logger.error(`[MeController] syncLocalMediaProgress invalid local media progress object no library item`, localProgress)
+        Logger.error(`[MeController] syncLocalMediaProgress invalid local media progress object no library item with id "${localProgress.libraryItemId}"`, localProgress)
         continue
       }
 
@@ -26,7 +26,7 @@ class SearchController {
 
     let results = null
     if (podcast) results = await PodcastFinder.findCovers(query.title)
-    else results = await BookFinder.findCovers(query.provider || 'google', query.title, query.author || null)
+    else results = await BookFinder.findCovers(query.provider || 'google', query.title, query.author || '')
     res.json({
       results
     })
@@ -115,6 +115,13 @@ class UserController {
     }
   }
 
+  /**
+   * PATCH: /api/users/:id
+   * Update user
+   *
+   * @param {import('express').Request} req
+   * @param {import('express').Response} res
+   */
   async update(req, res) {
     const user = req.reqUser
 
@@ -126,6 +133,7 @@ class UserController {
     var account = req.body
     var shouldUpdateToken = false
 
+    // When changing username create a new API token
     if (account.username !== undefined && account.username !== user.username) {
       const usernameExists = await Database.userModel.getUserByUsername(account.username)
       if (usernameExists) {
@@ -374,7 +374,7 @@ class BookFinder {
     if (!books.length && maxFuzzySearches > 0) {
       // Normalize title and author
       title = title.trim().toLowerCase()
-      author = author.trim().toLowerCase()
+      author = author?.trim().toLowerCase() || ''
 
       const cleanAuthor = this.cleanAuthorForCompares(author)
 
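The BookFinder hunk above pairs with the SearchController hunk earlier: the author argument can now arrive as an empty string or be missing entirely, and author.trim() on a missing value would throw, while the optional chaining with a '' fallback keeps the fuzzy search going. A small standalone illustration (inputs are invented):

    // Illustrative: why the optional chaining matters for a missing author
    function normalizeAuthor(author) {
      return author?.trim().toLowerCase() || ''
    }

    console.log(normalizeAuthor('  Brandon Sanderson ')) // 'brandon sanderson'
    console.log(normalizeAuthor(undefined)) // '' (the old code would throw a TypeError)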
@@ -100,7 +100,7 @@ class BackupManager {
     let entries
     try {
       entries = await zip.entries()
-    } catch(error){
+    } catch (error) {
       // Not a valid zip file
       Logger.error('[BackupManager] Failed to read backup file - backup might not be a valid .zip file', tempPath, error)
       return res.status(400).send('Failed to read backup file - backup might not be a valid .zip file')
@@ -182,7 +182,6 @@ class BackupManager {
         data = await zip.entryData('details')
       } catch (error) {
         Logger.error(`[BackupManager] Failed to unzip backup "${fullFilePath}"`, error)
-        await zip.close()
         continue
       }
 
@@ -794,6 +794,9 @@ class LibraryItem extends Model {
         {
           fields: ['libraryId', 'mediaType']
         },
+        {
+          fields: ['libraryId', 'mediaId', 'mediaType']
+        },
         {
           fields: ['birthtime']
         },
@@ -59,6 +59,7 @@ class User extends Model {
       id: userExpanded.id,
       oldUserId: userExpanded.extraData?.oldUserId || null,
       username: userExpanded.username,
+      email: userExpanded.email || null,
       pash: userExpanded.pash,
       type: userExpanded.type,
       token: userExpanded.token,
@@ -96,6 +97,7 @@ class User extends Model {
     return {
       id: oldUser.id,
       username: oldUser.username,
+      email: oldUser.email || null,
       pash: oldUser.pash || null,
       type: oldUser.type || null,
       token: oldUser.token || null,
@@ -168,7 +168,13 @@ class PlaybackSession {
     this.currentTime = session.currentTime || 0
 
     this.startedAt = session.startedAt
-    this.updatedAt = session.updatedAt || null
+    this.updatedAt = session.updatedAt || session.startedAt
+
+    // Local playback sessions dont set this date field so set using updatedAt
+    if (!this.date && session.updatedAt) {
+      this.date = date.format(new Date(session.updatedAt), 'YYYY-MM-DD')
+      this.dayOfWeek = date.format(new Date(session.updatedAt), 'dddd')
+    }
   }
 
   get mediaItemId() {
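The PlaybackSession hunk above falls back to startedAt when updatedAt is missing and back-fills date and dayOfWeek for local sessions that never set them. A minimal sketch of the same fallback, assuming the date helper is the date-and-time library the server bundles (that assumption comes from the wider codebase, not from this diff):

    // Sketch of the back-fill, assuming the date-and-time format() API
    const date = require('date-and-time')

    function backfillSessionDates(session) {
      const updatedAt = session.updatedAt || session.startedAt
      return {
        updatedAt,
        date: date.format(new Date(updatedAt), 'YYYY-MM-DD'), // e.g. '2023-10-28'
        dayOfWeek: date.format(new Date(updatedAt), 'dddd') // e.g. 'Saturday'
      }
    }

    console.log(backfillSessionDates({ startedAt: Date.now() }))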
@@ -339,9 +339,9 @@ class Stream extends EventEmitter {
       } else {
         Logger.error('Ffmpeg Err', '"' + err.message + '"')
 
-        // Temporary workaround for https://github.com/advplyr/audiobookshelf/issues/172
-        const aacErrorMsg = 'ffmpeg exited with code 1: Could not write header for output file #0 (incorrect codec parameters ?)'
-        if (audioCodec === 'copy' && this.isAACEncodable && err.message && err.message.startsWith(aacErrorMsg)) {
+        // Temporary workaround for https://github.com/advplyr/audiobookshelf/issues/172 and https://github.com/advplyr/audiobookshelf/issues/2157
+        const aacErrorMsg = 'ffmpeg exited with code 1:'
+        if (audioCodec === 'copy' && this.isAACEncodable && err.message?.startsWith(aacErrorMsg)) {
           Logger.info(`[Stream] Re-attempting stream with AAC encode`)
           this.transcodeOptions.forceAAC = true
           this.reset(this.startTime)
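The Stream hunk above widens the AAC retry: any 'ffmpeg exited with code 1:' failure on a copy-codec stream for an AAC-encodable file now triggers a re-encode attempt, rather than only the one exact header-write message (see issues 172 and 2157 referenced in the comment). A tiny standalone check of the new condition (the sample error message is invented):

    // Illustrative: the broadened retry condition
    const aacErrorMsg = 'ffmpeg exited with code 1:'

    function shouldRetryWithAAC(audioCodec, isAACEncodable, err) {
      return audioCodec === 'copy' && isAACEncodable && err.message?.startsWith(aacErrorMsg)
    }

    const err = new Error('ffmpeg exited with code 1: Error while opening encoder')
    console.log(shouldRetryWithAAC('copy', true, err)) // true; the old check matched only one exact message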
@@ -435,4 +435,4 @@ class Stream extends EventEmitter {
     return newAudioTrack
   }
 }
-module.exports = Stream
\ No newline at end of file
+module.exports = Stream
@@ -7,6 +7,7 @@ class User {
     this.id = null
     this.oldUserId = null // TODO: Temp for keeping old access tokens
     this.username = null
+    this.email = null
     this.pash = null
     this.type = null
     this.token = null
@@ -76,6 +77,7 @@ class User {
       id: this.id,
       oldUserId: this.oldUserId,
       username: this.username,
+      email: this.email,
       pash: this.pash,
       type: this.type,
       token: this.token,
@@ -97,6 +99,7 @@ class User {
       id: this.id,
       oldUserId: this.oldUserId,
       username: this.username,
+      email: this.email,
       type: this.type,
       token: (this.type === 'root' && hideRootToken) ? '' : this.token,
       mediaProgress: this.mediaProgress ? this.mediaProgress.map(li => li.toJSON()) : [],
@@ -140,6 +143,7 @@ class User {
     this.id = user.id
     this.oldUserId = user.oldUserId
     this.username = user.username
+    this.email = user.email || null
     this.pash = user.pash
     this.type = user.type
     this.token = user.token
@@ -184,7 +188,7 @@ class User {
   update(payload) {
     var hasUpdates = false
     // Update the following keys:
-    const keysToCheck = ['pash', 'type', 'username', 'isActive']
+    const keysToCheck = ['pash', 'type', 'username', 'email', 'isActive']
     keysToCheck.forEach((key) => {
       if (payload[key] !== undefined) {
         if (key === 'isActive' || payload[key]) { // pash, type, username must evaluate to true (cannot be null or empty)
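With 'email' added to keysToCheck above, a user update payload that carries an email is now applied like the other editable account fields. A condensed, self-contained sketch of the loop (the assignment logic past the guard shown in the diff is a simplification, not the exact method body):

    // Condensed sketch of the key check with 'email' included
    function updateUser(user, payload) {
      let hasUpdates = false
      const keysToCheck = ['pash', 'type', 'username', 'email', 'isActive']
      keysToCheck.forEach((key) => {
        if (payload[key] !== undefined) {
          // pash, type, username must evaluate to true (cannot be null or empty)
          if (key === 'isActive' || payload[key]) {
            if (user[key] !== payload[key]) {
              user[key] = payload[key]
              hasUpdates = true
            }
          }
        }
      })
      return hasUpdates
    }

    console.log(updateUser({ email: null }, { email: 'user@example.com' })) // true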
@@ -1111,7 +1111,7 @@ class BookScanner {
       const result = await CoverManager.downloadCoverFromUrlNew(results[i], libraryItemId, libraryItemPath)
 
       if (result.error) {
-        Logger.error(`[Scanner] Failed to download cover from url "${results[i]}" | Attempt ${i + 1}`, result.error)
+        libraryScan.addLog(LogLevel.ERROR, `Failed to download cover from url "${results[i]}" | Attempt ${i + 1}`, result.error)
       } else if (result.cover) {
         return result.cover
       }
@@ -166,4 +166,27 @@ module.exports.getTitleIgnorePrefix = (title) => {
 module.exports.getTitlePrefixAtEnd = (title) => {
   let [sort, prefix] = getTitleParts(title)
   return prefix ? `${sort}, ${prefix}` : title
 }
+
+/**
+ * to lower case for only ascii characters
+ * used to handle sqlite that doesnt support unicode lower
+ * @see https://github.com/advplyr/audiobookshelf/issues/2187
+ *
+ * @param {string} str
+ * @returns {string}
+ */
+module.exports.asciiOnlyToLowerCase = (str) => {
+  if (!str) return ''
+
+  let temp = ''
+  for (let chars of str) {
+    let value = chars.charCodeAt()
+    if (value >= 65 && value <= 90) {
+      temp += String.fromCharCode(value + 32)
+    } else {
+      temp += chars
+    }
+  }
+  return temp
+}
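The helper above is the heart of the search fix referenced by issue #2187: SQLite's built-in LOWER() only folds ASCII letters, so lowercasing the query with JavaScript's Unicode-aware toLowerCase() made some titles with diacritics unmatchable. asciiOnlyToLowerCase mimics SQLite's behaviour on the JS side. A usage sketch (the require path is an assumption based on the '../index' imports later in this diff):

    // Why ASCII-only lowercasing matters: toLowerCase() also folds accented
    // letters, but SQLite's LOWER() does not, so the two sides disagreed.
    const { asciiOnlyToLowerCase } = require('./server/utils/index')

    console.log('CAFÉ Müller'.toLowerCase()) // 'café müller' (Unicode-aware)
    console.log(asciiOnlyToLowerCase('CAFÉ Müller')) // 'cafÉ müller' (ASCII letters only, like SQLite LOWER)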
@@ -205,6 +205,15 @@ module.exports = {
         }
       }
     ]
+
+    // Handle library setting to hide single book series
+    // TODO: Merge with existing query
+    if (library.settings.hideSingleBookSeries) {
+      seriesWhere.push(Sequelize.where(Sequelize.literal(`(SELECT count(*) FROM books b, bookSeries bs WHERE bs.seriesId = series.id AND bs.bookId = b.id)`), {
+        [Sequelize.Op.gt]: 1
+      }))
+    }
 
     // Handle user permissions to only include series with at least 1 book
     // TODO: Simplify to a single query
     if (userPermissionBookWhere.bookWhere.length) {
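The hideSingleBookSeries block above filters with a correlated count subquery: only series linked to more than one book survive. The same pattern in isolation (Sequelize v6 assumed; the table and column names are taken from the literal in the hunk):

    const Sequelize = require('sequelize')

    const seriesWhere = []
    seriesWhere.push(
      Sequelize.where(
        Sequelize.literal(`(SELECT count(*) FROM books b, bookSeries bs WHERE bs.seriesId = series.id AND bs.bookId = b.id)`),
        { [Sequelize.Op.gt]: 1 }
      )
    )
    // seriesWhere is then merged into the series query's where clause by the surrounding code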
@@ -2,6 +2,7 @@ const Sequelize = require('sequelize')
 const Database = require('../../Database')
 const Logger = require('../../Logger')
 const authorFilters = require('./authorFilters')
+const { asciiOnlyToLowerCase } = require('../index')
 
 module.exports = {
   /**
@@ -1013,7 +1014,8 @@ module.exports = {
       let matchText = null
       let matchKey = null
       for (const key of ['title', 'subtitle', 'asin', 'isbn']) {
-        if (book[key]?.toLowerCase().includes(query)) {
+        const valueToLower = asciiOnlyToLowerCase(book[key])
+        if (valueToLower.includes(query)) {
           matchText = book[key]
           matchKey = key
           break
@@ -2,6 +2,7 @@
 const Sequelize = require('sequelize')
 const Database = require('../../Database')
 const Logger = require('../../Logger')
+const { asciiOnlyToLowerCase } = require('../index')
 
 module.exports = {
   /**
@@ -247,7 +248,7 @@ module.exports = {
       podcastEpisodeWhere['$mediaProgresses.isFinished$'] = true
     }
   } else if (filterGroup === 'recent') {
-    libraryItemWhere['createdAt'] = {
+    podcastEpisodeWhere['createdAt'] = {
       [Sequelize.Op.gte]: new Date(new Date() - (60 * 24 * 60 * 60 * 1000)) // 60 days ago
     }
   }
@@ -364,7 +365,8 @@ module.exports = {
       let matchText = null
       let matchKey = null
       for (const key of ['title', 'author', 'itunesId', 'itunesArtistId']) {
-        if (podcast[key]?.toLowerCase().includes(query)) {
+        const valueToLower = asciiOnlyToLowerCase(podcast[key])
+        if (valueToLower.includes(query)) {
           matchText = podcast[key]
           matchKey = key
           break