Mirror of https://github.com/advplyr/audiobookshelf.git
Merge branch 'master' into auth_passportjs
commit 2662e8f715
55 changed files with 1246 additions and 175 deletions
@@ -166,10 +166,25 @@ class Database {
    */
   async connect() {
     Logger.info(`[Database] Initializing db at "${this.dbPath}"`)

+    let logging = false
+    let benchmark = false
+    if (process.env.QUERY_LOGGING === "log") {
+      // Setting QUERY_LOGGING=log will log all Sequelize queries before they run
+      Logger.info(`[Database] Query logging enabled`)
+      logging = (query) => Logger.dev(`Running the following query:\n ${query}`)
+    } else if (process.env.QUERY_LOGGING === "benchmark") {
+      // Setting QUERY_LOGGING=benchmark will log all Sequelize queries and their execution times, after they run
+      Logger.info(`[Database] Query benchmarking enabled"`)
+      logging = (query, time) => Logger.dev(`Ran the following query in ${time}ms:\n ${query}`)
+      benchmark = true
+    }
+
     this.sequelize = new Sequelize({
       dialect: 'sqlite',
       storage: this.dbPath,
-      logging: false,
+      logging: logging,
+      benchmark: benchmark,
       transactionType: 'IMMEDIATE'
     })
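For context on the new QUERY_LOGGING options, here is a minimal standalone Sequelize sketch (not from the commit; assumes the sequelize and sqlite3 packages are installed) of how benchmark: true feeds the execution time into the logging callback:

const { Sequelize } = require('sequelize')

const db = new Sequelize({
  dialect: 'sqlite',
  storage: ':memory:',
  benchmark: true,
  // With benchmark enabled, Sequelize calls logging(sql, elapsedMs) after each query
  logging: (sql, timeMs) => console.log(`Ran the following query in ${timeMs}ms:\n ${sql}`)
})

db.authenticate() // runs a trivial SELECT and logs its duration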
@@ -92,7 +92,7 @@ class Logger {
    * @param {...any} args
    */
   dev(...args) {
-    if (!this.isDev) return
+    if (!this.isDev || process.env.HIDE_DEV_LOGS === '1') return
     console.log(`[${this.timestamp}] DEV:`, ...args)
   }
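A tiny self-contained sketch (illustrative, not the project's Logger class) of the env-gated behavior the new check adds:

// Dev logs are skipped outside dev mode or when HIDE_DEV_LOGS=1 is set
const isDev = process.env.NODE_ENV !== 'production'
function dev(...args) {
  if (!isDev || process.env.HIDE_DEV_LOGS === '1') return
  console.log(`[${new Date().toISOString()}] DEV:`, ...args)
}

dev('printed unless HIDE_DEV_LOGS=1 (or production mode) is in effect')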
@@ -28,6 +28,8 @@ class FolderWatcher extends EventEmitter {
     this.ignoreDirs = []
     /** @type {string[]} */
     this.pendingDirsToRemoveFromIgnore = []
+    /** @type {NodeJS.Timeout} */
+    this.removeFromIgnoreTimer = null

     this.disabled = false
   }

@@ -240,9 +242,12 @@ class FolderWatcher extends EventEmitter {
    */
   addIgnoreDir(path) {
     path = this.cleanDirPath(path)
-    if (this.ignoreDirs.includes(path)) return
     this.pendingDirsToRemoveFromIgnore = this.pendingDirsToRemoveFromIgnore.filter(p => p !== path)
-    Logger.debug(`[Watcher] Ignoring directory "${path}"`)
+    if (this.ignoreDirs.includes(path)) {
+      // Already ignoring dir
+      return
+    }
+    Logger.debug(`[Watcher] addIgnoreDir: Ignoring directory "${path}"`)
     this.ignoreDirs.push(path)
   }

@@ -255,18 +260,24 @@ class FolderWatcher extends EventEmitter {
    */
   removeIgnoreDir(path) {
     path = this.cleanDirPath(path)
-    if (!this.ignoreDirs.includes(path) || this.pendingDirsToRemoveFromIgnore.includes(path)) return
+    if (!this.ignoreDirs.includes(path)) {
+      Logger.debug(`[Watcher] removeIgnoreDir: Path is not being ignored "${path}"`)
+      return
+    }

     // Add a 5 second delay before removing the ignore from this dir
-    this.pendingDirsToRemoveFromIgnore.push(path)
-    setTimeout(() => {
+    if (!this.pendingDirsToRemoveFromIgnore.includes(path)) {
+      this.pendingDirsToRemoveFromIgnore.push(path)
+    }
+
+    clearTimeout(this.removeFromIgnoreTimer)
+    this.removeFromIgnoreTimer = setTimeout(() => {
       if (this.pendingDirsToRemoveFromIgnore.includes(path)) {
         this.pendingDirsToRemoveFromIgnore = this.pendingDirsToRemoveFromIgnore.filter(p => p !== path)
-        Logger.debug(`[Watcher] No longer ignoring directory "${path}"`)
+        Logger.debug(`[Watcher] removeIgnoreDir: No longer ignoring directory "${path}"`)
         this.ignoreDirs = this.ignoreDirs.filter(p => p !== path)
       }
     }, 5000)

   }
 }
 module.exports = FolderWatcher
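The intent of the new timer is easier to see in isolation. A simplified, self-contained sketch (illustrative names, same 5 second grace period, not the project's Watcher) of the delayed-unignore pattern:

// Keep a directory on the ignore list for a grace period after unignoring it,
// so watcher events that trail a server-side write are still swallowed.
class IgnoreList {
  constructor(delayMs = 5000) {
    this.delayMs = delayMs
    this.ignored = new Set()
    this.pending = new Set()
    this.timer = null
  }

  add(dir) {
    this.pending.delete(dir) // re-adding cancels a pending removal
    this.ignored.add(dir)
  }

  remove(dir) {
    if (!this.ignored.has(dir)) return
    this.pending.add(dir)
    clearTimeout(this.timer)
    this.timer = setTimeout(() => {
      for (const d of this.pending) this.ignored.delete(d)
      this.pending.clear()
    }, this.delayMs)
  }

  isIgnored(dir) {
    return this.ignored.has(dir)
  }
}

const list = new IgnoreList()
list.add('/audiobooks/new-book')
list.remove('/audiobooks/new-book')
console.log(list.isIgnored('/audiobooks/new-book')) // still true for roughly 5 seconds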
@@ -167,6 +167,30 @@ class AuthorController {
     }
   }

+  /**
+   * DELETE: /api/authors/:id
+   * Remove author from all books and delete
+   *
+   * @param {import('express').Request} req
+   * @param {import('express').Response} res
+   */
+  async delete(req, res) {
+    Logger.info(`[AuthorController] Removing author "${req.author.name}"`)
+
+    await Database.authorModel.removeById(req.author.id)
+
+    if (req.author.imagePath) {
+      await CacheManager.purgeImageCache(req.author.id) // Purge cache
+    }
+
+    SocketAuthority.emitter('author_removed', req.author.toJSON())
+
+    // Update filter data
+    Database.removeAuthorFromFilterData(req.author.libraryId, req.author.id)
+
+    res.sendStatus(200)
+  }
+
   async match(req, res) {
     let authorData = null
     const region = req.body.region || 'us'
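A hedged client-side example of the new route (placeholder host, author id, and token; assumes Node 18+ for the global fetch):

// DELETE /api/authors/:id removes the author from all books, purges its cached image,
// emits "author_removed" over the socket, and responds with 200
fetch('http://localhost:3333/api/authors/<author-id>', {
  method: 'DELETE',
  headers: { Authorization: 'Bearer <api-token>' }
}).then((res) => console.log(res.status)) // 200 on success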
@@ -6,7 +6,8 @@ class BackupController {

   getAll(req, res) {
     res.json({
-      backups: this.backupManager.backups.map(b => b.toJSON())
+      backups: this.backupManager.backups.map(b => b.toJSON()),
+      backupLocation: this.backupManager.backupLocation
     })
   }

@@ -42,6 +43,9 @@ class BackupController {
       Logger.debug(`Use X-Accel to serve static file ${encodedURI}`)
       return res.status(204).header({ 'X-Accel-Redirect': encodedURI }).send()
     }

+    res.setHeader('Content-disposition', 'attachment; filename=' + req.backup.filename)
+
     res.sendFile(req.backup.fullPath)
   }

@@ -65,4 +69,4 @@ class BackupController {
     next()
   }
 }
-module.exports = new BackupController()
\ No newline at end of file
+module.exports = new BackupController()
@@ -620,7 +620,7 @@ class LibraryController {
           model: Database.bookModel,
           attributes: ['id', 'tags', 'explicit'],
           where: bookWhere,
-          required: true,
+          required: false,
           through: {
             attributes: []
           }
@@ -225,15 +225,45 @@ class LibraryItemController {
     res.sendStatus(200)
   }

-  // GET api/items/:id/cover
+  /**
+   * GET: api/items/:id/cover
+   *
+   * @param {import('express').Request} req
+   * @param {import('express').Response} res
+   */
   async getCover(req, res) {
-    const { query: { width, height, format, raw }, libraryItem } = req
+    const { query: { width, height, format, raw } } = req
+
+    const libraryItem = await Database.libraryItemModel.findByPk(req.params.id, {
+      attributes: ['id', 'mediaType', 'mediaId', 'libraryId'],
+      include: [
+        {
+          model: Database.bookModel,
+          attributes: ['id', 'coverPath', 'tags', 'explicit']
+        },
+        {
+          model: Database.podcastModel,
+          attributes: ['id', 'coverPath', 'tags', 'explicit']
+        }
+      ]
+    })
+    if (!libraryItem) {
+      Logger.warn(`[LibraryItemController] getCover: Library item "${req.params.id}" does not exist`)
+      return res.sendStatus(404)
+    }
+
+    // Check if user can access this library item
+    if (!req.user.checkCanAccessLibraryItemWithData(libraryItem.libraryId, libraryItem.media.explicit, libraryItem.media.tags)) {
+      return res.sendStatus(403)
+    }
+
+    // Check if library item media has a cover path
+    if (!libraryItem.media.coverPath || !await fs.pathExists(libraryItem.media.coverPath)) {
+      Logger.debug(`[LibraryItemController] getCover: Library item "${req.params.id}" has no cover path`)
+      return res.sendStatus(404)
+    }

     if (raw) { // any value
-      if (!libraryItem.media.coverPath || !await fs.pathExists(libraryItem.media.coverPath)) {
-        return res.sendStatus(404)
-      }
-
       if (global.XAccel) {
         const encodedURI = encodeUriPath(global.XAccel + libraryItem.media.coverPath)
         Logger.debug(`Use X-Accel to serve static file ${encodedURI}`)

@@ -247,13 +277,7 @@ class LibraryItemController {
       height: height ? parseInt(height) : null,
       width: width ? parseInt(width) : null
     }
-    return CacheManager.handleCoverCache(res, libraryItem, options)
-  }
-
-  // GET: api/items/:id/stream
-  openStream(req, res) {
-    // this.streamManager.openStreamApiRequest(res, req.user, req.libraryItem)
-    res.sendStatus(500)
+    return CacheManager.handleCoverCache(res, libraryItem.id, libraryItem.media.coverPath, options)
   }

   // POST: api/items/:id/play
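A minimal client sketch (placeholder host, item id, and auth header; not from the commit) of requesting a resized cover through the updated route:

// width/height/format are optional resize hints; raw (any value) returns the original file
fetch('http://localhost:3333/api/items/<item-id>/cover?width=400&format=webp', {
  headers: { Authorization: 'Bearer <api-token>' }
})
  .then((res) => res.arrayBuffer())
  .then((buf) => console.log(`received ${buf.byteLength} bytes`))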
@@ -196,7 +196,7 @@ class MeController {

       const libraryItem = await Database.libraryItemModel.getOldById(localProgress.libraryItemId)
       if (!libraryItem) {
-        Logger.error(`[MeController] syncLocalMediaProgress invalid local media progress object no library item`, localProgress)
+        Logger.error(`[MeController] syncLocalMediaProgress invalid local media progress object no library item with id "${localProgress.libraryItemId}"`, localProgress)
         continue
       }

@@ -91,7 +91,7 @@ class PodcastController {

     res.json(libraryItem.toJSONExpanded())

-    if (payload.episodesToDownload && payload.episodesToDownload.length) {
+    if (payload.episodesToDownload?.length) {
       Logger.info(`[PodcastController] Podcast created now starting ${payload.episodesToDownload.length} episode downloads`)
       this.podcastManager.downloadPodcastEpisodes(libraryItem, payload.episodesToDownload)
     }
@@ -52,21 +52,19 @@ class BookFinder {
   cleanTitleForCompares(title) {
     if (!title) return ''
     // Remove subtitle if there (i.e. "Cool Book: Coolest Ever" becomes "Cool Book")
-    var stripped = this.stripSubtitle(title)
+    let stripped = this.stripSubtitle(title)

     // Remove text in paranthesis (i.e. "Ender's Game (Ender's Saga)" becomes "Ender's Game")
-    var cleaned = stripped.replace(/ *\([^)]*\) */g, "")
+    let cleaned = stripped.replace(/ *\([^)]*\) */g, "")

     // Remove single quotes (i.e. "Ender's Game" becomes "Enders Game")
     cleaned = cleaned.replace(/'/g, '')
-    cleaned = this.replaceAccentedChars(cleaned)
-    return cleaned.toLowerCase()
+    return this.replaceAccentedChars(cleaned)
   }

   cleanAuthorForCompares(author) {
     if (!author) return ''
-    var cleaned = this.replaceAccentedChars(author)
-    return cleaned.toLowerCase()
+    return this.replaceAccentedChars(author)
   }

   filterSearchResults(books, title, author, maxTitleDistance, maxAuthorDistance) {

@@ -181,12 +179,134 @@ class BookFinder {
     return books
   }

+  addTitleCandidate(title, candidates) {
+    // Main variant
+    const cleanTitle = this.cleanTitleForCompares(title).trim()
+    if (!cleanTitle) return
+    candidates.add(cleanTitle)
+
+    let candidate = cleanTitle
+
+    // Remove subtitle
+    candidate = candidate.replace(/([,:;_]| by ).*/g, "").trim()
+    if (candidate)
+      candidates.add(candidate)
+
+    // Remove preceding/trailing numbers
+    candidate = candidate.replace(/^\d+ | \d+$/g, "").trim()
+    if (candidate)
+      candidates.add(candidate)
+
+    // Remove bitrate
+    candidate = candidate.replace(/(^| )\d+k(bps)?( |$)/, " ").trim()
+    if (candidate)
+      candidates.add(candidate)
+
+    // Remove edition
+    candidate = candidate.replace(/ (2nd|3rd|\d+th)\s+ed(\.|ition)?/, "").trim()
+    if (candidate)
+      candidates.add(candidate)
+  }
+
+  /**
+   * Search for books including fuzzy searches
+   *
+   * @param {string} provider
+   * @param {string} title
+   * @param {string} author
+   * @param {string} isbn
+   * @param {string} asin
+   * @param {{titleDistance:number, authorDistance:number, maxFuzzySearches:number}} options
+   * @returns {Promise<Object[]>}
+   */
   async search(provider, title, author, isbn, asin, options = {}) {
-    var books = []
-    var maxTitleDistance = !isNaN(options.titleDistance) ? Number(options.titleDistance) : 4
-    var maxAuthorDistance = !isNaN(options.authorDistance) ? Number(options.authorDistance) : 4
+    let books = []
+    const maxTitleDistance = !isNaN(options.titleDistance) ? Number(options.titleDistance) : 4
+    const maxAuthorDistance = !isNaN(options.authorDistance) ? Number(options.authorDistance) : 4
+    const maxFuzzySearches = !isNaN(options.maxFuzzySearches) ? Number(options.maxFuzzySearches) : 5
+    let numFuzzySearches = 0
+
+    if (!title)
+      return books
+
+    books = await this.runSearch(title, author, provider, asin, maxTitleDistance, maxAuthorDistance)
+
+    if (!books.length && maxFuzzySearches > 0) {
+      // normalize title and author
+      title = title.trim().toLowerCase()
+      author = author.trim().toLowerCase()
+
+      // Now run up to maxFuzzySearches fuzzy searches
+      let candidates = new Set()
+      let cleanedAuthor = this.cleanAuthorForCompares(author)
+      this.addTitleCandidate(title, candidates)
+
+      // remove parentheses and their contents, and replace with a separator
+      const cleanTitle = title.replace(/\[.*?\]|\(.*?\)|{.*?}/g, " - ")
+      // Split title into hypen-separated parts
+      const titleParts = cleanTitle.split(/ - | -|- /)
+      for (const titlePart of titleParts) {
+        this.addTitleCandidate(titlePart, candidates)
+      }
+      // We already searched for original title
+      if (author == cleanedAuthor) candidates.delete(title)
+      if (candidates.size > 0) {
+        candidates = [...candidates]
+        candidates.sort((a, b) => {
+          // Candidates that include the author are likely low quality
+          const includesAuthorDiff = !b.includes(cleanedAuthor) - !a.includes(cleanedAuthor)
+          if (includesAuthorDiff) return includesAuthorDiff
+          // Candidates that include only digits are also likely low quality
+          const onlyDigits = /^\d+$/
+          const includesOnlyDigitsDiff = !onlyDigits.test(b) - !onlyDigits.test(a)
+          if (includesOnlyDigitsDiff) return includesOnlyDigitsDiff
+          // Start with longer candidaets, as they are likely more specific
+          const lengthDiff = b.length - a.length
+          if (lengthDiff) return lengthDiff
+          return b.localeCompare(a)
+        })
+        Logger.debug(`[BookFinder] Found ${candidates.length} fuzzy title candidates`, candidates)
+        for (const candidate of candidates) {
+          if (++numFuzzySearches > maxFuzzySearches) return books
+          books = await this.runSearch(candidate, cleanedAuthor, provider, asin, maxTitleDistance, maxAuthorDistance)
+          if (books.length) break
+        }
+        if (!books.length) {
+          // Now try searching without the author
+          for (const candidate of candidates) {
+            if (++numFuzzySearches > maxFuzzySearches) return books
+            books = await this.runSearch(candidate, '', provider, asin, maxTitleDistance, maxAuthorDistance)
+            if (books.length) break
+          }
+        }
+      }
+    }
+
+    if (provider === 'openlibrary') {
+      books.sort((a, b) => {
+        return a.totalDistance - b.totalDistance
+      })
+    }
+
+    return books
+  }
+
+  /**
+   * Search for books
+   *
+   * @param {string} title
+   * @param {string} author
+   * @param {string} provider
+   * @param {string} asin only used for audible providers
+   * @param {number} maxTitleDistance only used for openlibrary provider
+   * @param {number} maxAuthorDistance only used for openlibrary provider
+   * @returns {Promise<Object[]>}
+   */
+  async runSearch(title, author, provider, asin, maxTitleDistance, maxAuthorDistance) {
     Logger.debug(`Book Search: title: "${title}", author: "${author || ''}", provider: ${provider}`)

+    let books = []
+
     if (provider === 'google') {
       books = await this.getGoogleBooksResults(title, author)
     } else if (provider.startsWith('audible')) {

@@ -203,23 +323,6 @@ class BookFinder {
     else {
       books = await this.getGoogleBooksResults(title, author)
     }

-    if (!books.length && !options.currentlyTryingCleaned) {
-      var cleanedTitle = this.cleanTitleForCompares(title)
-      var cleanedAuthor = this.cleanAuthorForCompares(author)
-      if (cleanedTitle == title && cleanedAuthor == author) return books
-
-      Logger.debug(`Book Search, no matches.. checking cleaned title and author`)
-      options.currentlyTryingCleaned = true
-      return this.search(provider, cleanedTitle, cleanedAuthor, isbn, asin, options)
-    }
-
-    if (provider === 'openlibrary') {
-      books.sort((a, b) => {
-        return a.totalDistance - b.totalDistance
-      })
-    }
-
     return books
   }

@@ -253,4 +356,4 @@ class BookFinder {
     return this.audnexus.getChaptersByASIN(asin, region)
   }
 }
-module.exports = new BookFinder()
\ No newline at end of file
+module.exports = new BookFinder()
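To see what the fuzzy search gains, here is a simplified, self-contained version of the candidate generation (it mirrors the regexes in addTitleCandidate but skips the accent and parenthesis cleanup, so it is only an approximation):

// Reduce a noisy title to progressively cleaner search candidates
function titleCandidates(title) {
  const candidates = new Set()
  let candidate = title.trim().toLowerCase()
  candidates.add(candidate)
  // Drop subtitle ("Title: Subtitle" -> "Title")
  candidate = candidate.replace(/([,:;_]| by ).*/g, '').trim()
  if (candidate) candidates.add(candidate)
  // Drop leading/trailing track numbers
  candidate = candidate.replace(/^\d+ | \d+$/g, '').trim()
  if (candidate) candidates.add(candidate)
  // Drop a bitrate tag such as "64kbps"
  candidate = candidate.replace(/(^| )\d+k(bps)?( |$)/, ' ').trim()
  if (candidate) candidates.add(candidate)
  // Drop an edition suffix such as "2nd Edition"
  candidate = candidate.replace(/ (2nd|3rd|\d+th)\s+ed(\.|ition)?/, '').trim()
  if (candidate) candidates.add(candidate)
  return [...candidates]
}

console.log(titleCandidates('Project Hail Mary 64kbps 2nd Edition'))
// -> [ 'project hail mary 64kbps 2nd edition',
//      'project hail mary 2nd edition',
//      'project hail mary' ]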
@@ -26,6 +26,10 @@ class BackupManager {
     this.backups = []
   }

+  get backupLocation() {
+    return this.BackupPath
+  }
+
   get backupSchedule() {
     return global.ServerSettings.backupSchedule
   }

@@ -96,7 +100,7 @@ class BackupManager {
     let entries
     try {
       entries = await zip.entries()
-    } catch(error){
+    } catch (error) {
       // Not a valid zip file
       Logger.error('[BackupManager] Failed to read backup file - backup might not be a valid .zip file', tempPath, error)
       return res.status(400).send('Failed to read backup file - backup might not be a valid .zip file')

@@ -178,7 +182,6 @@ class BackupManager {
       data = await zip.entryData('details')
     } catch (error) {
       Logger.error(`[BackupManager] Failed to unzip backup "${fullFilePath}"`, error)
-      await zip.close()
       continue
     }

@@ -39,14 +39,14 @@ class CacheManager {
     }
   }

-  async handleCoverCache(res, libraryItem, options = {}) {
+  async handleCoverCache(res, libraryItemId, coverPath, options = {}) {
     const format = options.format || 'webp'
     const width = options.width || 400
     const height = options.height || null

     res.type(`image/${format}`)

-    const path = Path.join(this.CoverCachePath, `${libraryItem.id}_${width}${height ? `x${height}` : ''}`) + '.' + format
+    const path = Path.join(this.CoverCachePath, `${libraryItemId}_${width}${height ? `x${height}` : ''}`) + '.' + format

     // Cache exists
     if (await fs.pathExists(path)) {

@@ -67,11 +67,7 @@ class CacheManager {
       return ps.pipe(res)
     }

-    if (!libraryItem.media.coverPath || !await fs.pathExists(libraryItem.media.coverPath)) {
-      return res.sendStatus(500)
-    }
-
-    const writtenFile = await resizeImage(libraryItem.media.coverPath, path, width, height)
+    const writtenFile = await resizeImage(coverPath, path, width, height)
     if (!writtenFile) return res.sendStatus(500)

     if (global.XAccel) {
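A small sketch (hypothetical ids and directories, POSIX paths) of the cache key handleCoverCache now derives from its new arguments:

const Path = require('path')

// Mirrors the construction above: {CoverCachePath}/{libraryItemId}_{width}[x{height}].{format}
function coverCachePath(coverCacheDir, libraryItemId, { width = 400, height = null, format = 'webp' } = {}) {
  return Path.join(coverCacheDir, `${libraryItemId}_${width}${height ? `x${height}` : ''}`) + '.' + format
}

console.log(coverCachePath('/metadata/cache/covers', '<library-item-id>', { width: 400 }))
// -> /metadata/cache/covers/<library-item-id>_400.webp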
@@ -47,10 +47,14 @@ class BookAuthor extends Model {
     book.belongsToMany(author, { through: BookAuthor })
     author.belongsToMany(book, { through: BookAuthor })

-    book.hasMany(BookAuthor)
+    book.hasMany(BookAuthor, {
+      onDelete: 'CASCADE'
+    })
     BookAuthor.belongsTo(book)

-    author.hasMany(BookAuthor)
+    author.hasMany(BookAuthor, {
+      onDelete: 'CASCADE'
+    })
     BookAuthor.belongsTo(author)
   }
 }
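A self-contained Sequelize sketch (in-memory SQLite, generic model names; not the project's models) of the same association shape with cascading deletes declared on the join model:

const { Sequelize, DataTypes, Model } = require('sequelize')
const sequelize = new Sequelize({ dialect: 'sqlite', storage: ':memory:', logging: false })

class Book extends Model { }
class Author extends Model { }
class BookAuthor extends Model { }
Book.init({ title: DataTypes.STRING }, { sequelize })
Author.init({ name: DataTypes.STRING }, { sequelize })
BookAuthor.init({}, { sequelize })

Book.belongsToMany(Author, { through: BookAuthor })
Author.belongsToMany(Book, { through: BookAuthor })
Book.hasMany(BookAuthor, { onDelete: 'CASCADE' })
BookAuthor.belongsTo(Book)
Author.hasMany(BookAuthor, { onDelete: 'CASCADE' })
BookAuthor.belongsTo(Author)

async function demo() {
  await sequelize.sync()
  const book = await Book.create({ title: 'Example Book' })
  const author = await Author.create({ name: 'Example Author' })
  await book.addAuthor(author)
  await author.destroy() // the join row is removed along with the author
  console.log(await BookAuthor.count()) // 0
}
demo()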
@@ -176,6 +176,8 @@ class Feed extends Model {
     if (!existingFeed) return false

+    let hasUpdates = false
+
     // Remove and update existing feed episodes
     for (const feedEpisode of existingFeed.feedEpisodes) {
       const oldFeedEpisode = oldFeedEpisodes.find(ep => ep.id === feedEpisode.id)
       // Episode removed

@@ -196,6 +198,14 @@ class Feed extends Model {
       }
     }

+    // Add new feed episodes
+    for (const episode of oldFeedEpisodes) {
+      if (!existingFeed.feedEpisodes.some(fe => fe.id === episode.id)) {
+        await this.sequelize.models.feedEpisode.createFromOld(feedObj.id, episode)
+        hasUpdates = true
+      }
+    }
+
     let feedHasUpdates = false
     for (const key in feedObj) {
       let existingValue = existingFeed[key]
@@ -63,6 +63,19 @@ class FeedEpisode extends Model {
     }
   }

+  /**
+   * Create feed episode from old model
+   *
+   * @param {string} feedId
+   * @param {Object} oldFeedEpisode
+   * @returns {Promise<FeedEpisode>}
+   */
+  static createFromOld(feedId, oldFeedEpisode) {
+    const newEpisode = this.getFromOld(oldFeedEpisode)
+    newEpisode.feedId = feedId
+    return this.create(newEpisode)
+  }
+
   static getFromOld(oldFeedEpisode) {
     return {
       id: oldFeedEpisode.id,
@@ -794,6 +794,9 @@ class LibraryItem extends Model {
         {
           fields: ['libraryId', 'mediaType']
         },
+        {
+          fields: ['libraryId', 'mediaId', 'mediaType']
+        },
         {
           fields: ['birthtime']
         },
@@ -168,7 +168,13 @@ class PlaybackSession {
     this.currentTime = session.currentTime || 0

     this.startedAt = session.startedAt
-    this.updatedAt = session.updatedAt || null
+    this.updatedAt = session.updatedAt || session.startedAt
+
+    // Local playback sessions dont set this date field so set using updatedAt
+    if (!this.date && session.updatedAt) {
+      this.date = date.format(new Date(session.updatedAt), 'YYYY-MM-DD')
+      this.dayOfWeek = date.format(new Date(session.updatedAt), 'dddd')
+    }
   }

   get mediaItemId() {
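Assuming the date helper here is the date-and-time package (which supports these format tokens), the derived fields look like this:

const date = require('date-and-time')

const updatedAt = Date.now() // placeholder for session.updatedAt
console.log(date.format(new Date(updatedAt), 'YYYY-MM-DD')) // e.g. "2023-10-31"
console.log(date.format(new Date(updatedAt), 'dddd'))       // full day name, e.g. "Tuesday"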
@@ -208,6 +208,7 @@ class ServerSettings {
       loggerScannerLogsToKeep: this.loggerScannerLogsToKeep,
       homeBookshelfView: this.homeBookshelfView,
       bookshelfView: this.bookshelfView,
+      podcastEpisodeSchedule: this.podcastEpisodeSchedule,
       sortingIgnorePrefix: this.sortingIgnorePrefix,
       sortingPrefixes: [...this.sortingPrefixes],
       chromecastEnabled: this.chromecastEnabled,
@@ -326,6 +326,18 @@ class User {
     return this.checkCanAccessLibraryItemWithTags(libraryItem.media.tags)
   }

+  /**
+   * Checks if a user can access a library item
+   * @param {string} libraryId
+   * @param {boolean} explicit
+   * @param {string[]} tags
+   */
+  checkCanAccessLibraryItemWithData(libraryId, explicit, tags) {
+    if (!this.checkCanAccessLibrary(libraryId)) return false
+    if (explicit && !this.canAccessExplicitContent) return false
+    return this.checkCanAccessLibraryItemWithTags(tags)
+  }
+
   findBookmark(libraryItemId, time) {
     return this.bookmarks.find(bm => bm.libraryItemId === libraryItemId && bm.time == time)
   }
@@ -99,7 +99,7 @@ class ApiRouter {
     this.router.delete('/items/:id', LibraryItemController.middleware.bind(this), LibraryItemController.delete.bind(this))
     this.router.get('/items/:id/download', LibraryItemController.middleware.bind(this), LibraryItemController.download.bind(this))
     this.router.patch('/items/:id/media', LibraryItemController.middleware.bind(this), LibraryItemController.updateMedia.bind(this))
-    this.router.get('/items/:id/cover', LibraryItemController.middleware.bind(this), LibraryItemController.getCover.bind(this))
+    this.router.get('/items/:id/cover', LibraryItemController.getCover.bind(this))
     this.router.post('/items/:id/cover', LibraryItemController.middleware.bind(this), LibraryItemController.uploadCover.bind(this))
     this.router.patch('/items/:id/cover', LibraryItemController.middleware.bind(this), LibraryItemController.updateCover.bind(this))
     this.router.delete('/items/:id/cover', LibraryItemController.middleware.bind(this), LibraryItemController.removeCover.bind(this))

@@ -199,6 +199,7 @@ class ApiRouter {
     //
     this.router.get('/authors/:id', AuthorController.middleware.bind(this), AuthorController.findOne.bind(this))
     this.router.patch('/authors/:id', AuthorController.middleware.bind(this), AuthorController.update.bind(this))
+    this.router.delete('/authors/:id', AuthorController.middleware.bind(this), AuthorController.delete.bind(this))
     this.router.post('/authors/:id/match', AuthorController.middleware.bind(this), AuthorController.match.bind(this))
     this.router.get('/authors/:id/image', AuthorController.middleware.bind(this), AuthorController.getImage.bind(this))

@@ -168,9 +168,7 @@ class BookScanner {
       hasMediaChanges = true
     }

-    // TODO: When metadata file is stored in /metadata/items/{libraryItemId}.[abs|json] we should load this
-    // TODO: store an additional array of metadata keys that the user has changed manually so we know what not to override
-    const bookMetadata = await this.getBookMetadataFromScanData(media.audioFiles, libraryItemData, libraryScan)
+    const bookMetadata = await this.getBookMetadataFromScanData(media.audioFiles, libraryItemData, libraryScan, existingLibraryItem.id)
     let authorsUpdated = false
     const bookAuthorsRemoved = []
     let seriesUpdated = false

@@ -550,9 +548,10 @@ class BookScanner {
    * @param {import('../models/Book').AudioFileObject[]} audioFiles
    * @param {import('./LibraryItemScanData')} libraryItemData
    * @param {LibraryScan} libraryScan
+   * @param {string} [existingLibraryItemId]
    * @returns {Promise<BookMetadataObject>}
    */
-  async getBookMetadataFromScanData(audioFiles, libraryItemData, libraryScan) {
+  async getBookMetadataFromScanData(audioFiles, libraryItemData, libraryScan, existingLibraryItemId = null) {
     // First set book metadata from folder/file names
     const bookMetadata = {
       title: libraryItemData.mediaMetadata.title,

@@ -722,11 +721,31 @@ class BookScanner {

     // If metadata.json or metadata.abs use this for metadata
     const metadataLibraryFile = libraryItemData.metadataJsonLibraryFile || libraryItemData.metadataAbsLibraryFile
-    const metadataText = metadataLibraryFile ? await readTextFile(metadataLibraryFile.metadata.path) : null
+    let metadataText = metadataLibraryFile ? await readTextFile(metadataLibraryFile.metadata.path) : null
+    let metadataFilePath = metadataLibraryFile?.metadata.path
+    let metadataFileFormat = libraryItemData.metadataJsonLibraryFile ? 'json' : 'abs'
+
+    // When metadata file is not stored with library item then check in the /metadata/items folder for it
+    if (!metadataText && existingLibraryItemId) {
+      let metadataPath = Path.join(global.MetadataPath, 'items', existingLibraryItemId)
+
+      let altFormat = global.ServerSettings.metadataFileFormat === 'json' ? 'abs' : 'json'
+      // First check the metadata format set in server settings, fallback to the alternate
+      metadataFilePath = Path.join(metadataPath, `metadata.${global.ServerSettings.metadataFileFormat}`)
+      metadataFileFormat = global.ServerSettings.metadataFileFormat
+      if (await fsExtra.pathExists(metadataFilePath)) {
+        metadataText = await readTextFile(metadataFilePath)
+      } else if (await fsExtra.pathExists(Path.join(metadataPath, `metadata.${altFormat}`))) {
+        metadataFilePath = Path.join(metadataPath, `metadata.${altFormat}`)
+        metadataFileFormat = altFormat
+        metadataText = await readTextFile(metadataFilePath)
+      }
+    }
+
     if (metadataText) {
-      libraryScan.addLog(LogLevel.INFO, `Found metadata file "${metadataLibraryFile.metadata.path}" - preferring`)
+      libraryScan.addLog(LogLevel.INFO, `Found metadata file "${metadataFilePath}" - preferring`)
       let abMetadata = null
-      if (!!libraryItemData.metadataJsonLibraryFile) {
+      if (metadataFileFormat === 'json') {
         abMetadata = abmetadataGenerator.parseJson(metadataText)
       } else {
         abMetadata = abmetadataGenerator.parse(metadataText, 'book')

@@ -1092,7 +1111,7 @@ class BookScanner {
       const result = await CoverManager.downloadCoverFromUrlNew(results[i], libraryItemId, libraryItemPath)

       if (result.error) {
-        Logger.error(`[Scanner] Failed to download cover from url "${results[i]}" | Attempt ${i + 1}`, result.error)
+        libraryScan.addLog(LogLevel.ERROR, `Failed to download cover from url "${results[i]}" | Attempt ${i + 1}`, result.error)
       } else if (result.cover) {
         return result.cover
       }
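The new lookup order is easier to read as a small standalone helper. A sketch (illustrative function name, assumes the fs-extra package; mirrors the logic above) that prefers the configured format and falls back to the alternate extension under /metadata/items/<libraryItemId>/:

const Path = require('path')
const fsExtra = require('fs-extra')

// Returns { filePath, format, text } for metadata.json / metadata.abs, or null if neither exists
async function resolveItemMetadataFile(metadataRoot, libraryItemId, preferredFormat = 'json') {
  const itemDir = Path.join(metadataRoot, 'items', libraryItemId)
  const altFormat = preferredFormat === 'json' ? 'abs' : 'json'
  for (const format of [preferredFormat, altFormat]) {
    const filePath = Path.join(itemDir, `metadata.${format}`)
    if (await fsExtra.pathExists(filePath)) {
      return { filePath, format, text: await fsExtra.readFile(filePath, 'utf8') }
    }
  }
  return null
}

// Example (hypothetical id): resolveItemMetadataFile('/metadata', '<library-item-id>', 'json')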
@@ -145,8 +145,7 @@ class PodcastScanner {
       hasMediaChanges = true
     }

-    // TODO: When metadata file is stored in /metadata/items/{libraryItemId}.[abs|json] we should load this
-    const podcastMetadata = await this.getPodcastMetadataFromScanData(existingPodcastEpisodes, libraryItemData, libraryScan)
+    const podcastMetadata = await this.getPodcastMetadataFromScanData(existingPodcastEpisodes, libraryItemData, libraryScan, existingLibraryItem.id)

     for (const key in podcastMetadata) {
       // Ignore unset metadata and empty arrays

@@ -312,9 +311,10 @@ class PodcastScanner {
    * @param {PodcastEpisode[]} podcastEpisodes Not the models for new podcasts
    * @param {import('./LibraryItemScanData')} libraryItemData
    * @param {import('./LibraryScan')} libraryScan
+   * @param {string} [existingLibraryItemId]
    * @returns {Promise<PodcastMetadataObject>}
    */
-  async getPodcastMetadataFromScanData(podcastEpisodes, libraryItemData, libraryScan) {
+  async getPodcastMetadataFromScanData(podcastEpisodes, libraryItemData, libraryScan, existingLibraryItemId = null) {
     const podcastMetadata = {
       title: libraryItemData.mediaMetadata.title,
       titleIgnorePrefix: getTitleIgnorePrefix(libraryItemData.mediaMetadata.title),

@@ -389,11 +389,31 @@ class PodcastScanner {

     // If metadata.json or metadata.abs use this for metadata
     const metadataLibraryFile = libraryItemData.metadataJsonLibraryFile || libraryItemData.metadataAbsLibraryFile
-    const metadataText = metadataLibraryFile ? await readTextFile(metadataLibraryFile.metadata.path) : null
+    let metadataText = metadataLibraryFile ? await readTextFile(metadataLibraryFile.metadata.path) : null
+    let metadataFilePath = metadataLibraryFile?.metadata.path
+    let metadataFileFormat = libraryItemData.metadataJsonLibraryFile ? 'json' : 'abs'
+
+    // When metadata file is not stored with library item then check in the /metadata/items folder for it
+    if (!metadataText && existingLibraryItemId) {
+      let metadataPath = Path.join(global.MetadataPath, 'items', existingLibraryItemId)
+
+      let altFormat = global.ServerSettings.metadataFileFormat === 'json' ? 'abs' : 'json'
+      // First check the metadata format set in server settings, fallback to the alternate
+      metadataFilePath = Path.join(metadataPath, `metadata.${global.ServerSettings.metadataFileFormat}`)
+      metadataFileFormat = global.ServerSettings.metadataFileFormat
+      if (await fsExtra.pathExists(metadataFilePath)) {
+        metadataText = await readTextFile(metadataFilePath)
+      } else if (await fsExtra.pathExists(Path.join(metadataPath, `metadata.${altFormat}`))) {
+        metadataFilePath = Path.join(metadataPath, `metadata.${altFormat}`)
+        metadataFileFormat = altFormat
+        metadataText = await readTextFile(metadataFilePath)
+      }
+    }
+
     if (metadataText) {
-      libraryScan.addLog(LogLevel.INFO, `Found metadata file "${metadataLibraryFile.metadata.path}" - preferring`)
+      libraryScan.addLog(LogLevel.INFO, `Found metadata file "${metadataFilePath}" - preferring`)
       let abMetadata = null
-      if (!!libraryItemData.metadataJsonLibraryFile) {
+      if (metadataFileFormat === 'json') {
         abMetadata = abmetadataGenerator.parseJson(metadataText)
       } else {
         abMetadata = abmetadataGenerator.parse(metadataText, 'podcast')
@@ -36,7 +36,7 @@ class Scanner {
     var searchISBN = options.isbn || libraryItem.media.metadata.isbn
     var searchASIN = options.asin || libraryItem.media.metadata.asin

-    var results = await BookFinder.search(provider, searchTitle, searchAuthor, searchISBN, searchASIN)
+    var results = await BookFinder.search(provider, searchTitle, searchAuthor, searchISBN, searchASIN, { maxFuzzySearches: 2 })
     if (!results.length) {
       return {
         warning: `No ${provider} match found`
@@ -190,6 +190,7 @@ module.exports = {
       const json = li.toJSONMinified()
       json.media.metadata.series = {
+        id: filteredSeries.id,
         name: filteredSeries.name,
         sequence: filteredSeries.sequence
       }

@@ -16,8 +16,8 @@ function parseCreators(metadata) {
 }

 function fetchCreators(creators, role) {
-  if (!creators || !creators.length) return null
-  return [...new Set(creators.filter(c => c.role === role).map(c => c.value))]
+  if (!creators?.length) return null
+  return [...new Set(creators.filter(c => c.role === role && c.value).map(c => c.value))]
 }

 function fetchTagString(metadata, tag) {

@@ -92,7 +92,7 @@ function fetchDescription(metadata) {

 function fetchGenres(metadata) {
   if (!metadata['dc:subject'] || !metadata['dc:subject'].length) return []
-  return [...new Set(metadata['dc:subject'].filter(g => typeof g === 'string'))]
+  return [...new Set(metadata['dc:subject'].filter(g => g && typeof g === 'string'))]
 }

 function fetchLanguage(metadata) {

@@ -122,7 +122,7 @@ function fetchNarrators(creators, metadata) {

 function fetchTags(metadata) {
   if (!metadata['dc:tag'] || !metadata['dc:tag'].length) return []
-  return [...new Set(metadata['dc:tag'].filter(tag => typeof tag === 'string'))]
+  return [...new Set(metadata['dc:tag'].filter(tag => tag && typeof tag === 'string'))]
 }

 function stripPrefix(str) {
@@ -205,6 +205,15 @@ module.exports = {
         }
       }
     ]

+    // Handle library setting to hide single book series
+    // TODO: Merge with existing query
+    if (library.settings.hideSingleBookSeries) {
+      seriesWhere.push(Sequelize.where(Sequelize.literal(`(SELECT count(*) FROM books b, bookSeries bs WHERE bs.seriesId = series.id AND bs.bookId = b.id)`), {
+        [Sequelize.Op.gt]: 1
+      }))
+    }
+
     // Handle user permissions to only include series with at least 1 book
     // TODO: Simplify to a single query
     if (userPermissionBookWhere.bookWhere.length) {
@@ -247,7 +247,7 @@ module.exports = {
       podcastEpisodeWhere['$mediaProgresses.isFinished$'] = true
     }
   } else if (filterGroup === 'recent') {
-    libraryItemWhere['createdAt'] = {
+    podcastEpisodeWhere['createdAt'] = {
       [Sequelize.Op.gte]: new Date(new Date() - (60 * 24 * 60 * 60 * 1000)) // 60 days ago
     }
   }