mirror of
https://github.com/advplyr/audiobookshelf.git
synced 2025-12-24 12:49:38 +00:00
Merge master
This commit is contained in:
commit
ab14b561f5
147 changed files with 4669 additions and 5036 deletions
|
|
@ -276,11 +276,17 @@ class Database {
|
|||
global.ServerSettings = this.serverSettings.toJSON()
|
||||
|
||||
// Version specific migrations
|
||||
if (this.serverSettings.version === '2.3.0' && this.compareVersions(packageJson.version, '2.3.0') == 1) {
|
||||
await dbMigration.migrationPatch(this)
|
||||
if (packageJson.version !== this.serverSettings.version) {
|
||||
if (this.serverSettings.version === '2.3.0' && this.compareVersions(packageJson.version, '2.3.0') == 1) {
|
||||
await dbMigration.migrationPatch(this)
|
||||
}
|
||||
if (['2.3.0', '2.3.1', '2.3.2', '2.3.3'].includes(this.serverSettings.version) && this.compareVersions(packageJson.version, '2.3.3') >= 0) {
|
||||
await dbMigration.migrationPatch2(this)
|
||||
}
|
||||
}
|
||||
if (['2.3.0', '2.3.1', '2.3.2', '2.3.3'].includes(this.serverSettings.version) && this.compareVersions(packageJson.version, '2.3.3') >= 0) {
|
||||
await dbMigration.migrationPatch2(this)
|
||||
// Build migrations
|
||||
if (this.serverSettings.buildNumber <= 0) {
|
||||
await require('./utils/migrations/absMetadataMigration').migrate(this)
|
||||
}
|
||||
|
||||
await this.cleanDatabase()
|
||||
|
|
@ -288,9 +294,19 @@ class Database {
|
|||
// Set if root user has been created
|
||||
this.hasRootUser = await this.models.user.getHasRootUser()
|
||||
|
||||
// Update server settings with version/build
|
||||
let updateServerSettings = false
|
||||
if (packageJson.version !== this.serverSettings.version) {
|
||||
Logger.info(`[Database] Server upgrade detected from ${this.serverSettings.version} to ${packageJson.version}`)
|
||||
this.serverSettings.version = packageJson.version
|
||||
this.serverSettings.buildNumber = packageJson.buildNumber
|
||||
updateServerSettings = true
|
||||
} else if (packageJson.buildNumber !== this.serverSettings.buildNumber) {
|
||||
Logger.info(`[Database] Server v${packageJson.version} build upgraded from ${this.serverSettings.buildNumber} to ${packageJson.buildNumber}`)
|
||||
this.serverSettings.buildNumber = packageJson.buildNumber
|
||||
updateServerSettings = true
|
||||
}
|
||||
if (updateServerSettings) {
|
||||
await this.updateServerSettings()
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -32,7 +32,6 @@ const PodcastManager = require('./managers/PodcastManager')
|
|||
const AudioMetadataMangaer = require('./managers/AudioMetadataManager')
|
||||
const RssFeedManager = require('./managers/RssFeedManager')
|
||||
const CronManager = require('./managers/CronManager')
|
||||
const TaskManager = require('./managers/TaskManager')
|
||||
const LibraryScanner = require('./scanner/LibraryScanner')
|
||||
|
||||
//Import the main Passport and Express-Session library
|
||||
|
|
@ -64,15 +63,14 @@ class Server {
|
|||
this.auth = new Auth()
|
||||
|
||||
// Managers
|
||||
this.taskManager = new TaskManager()
|
||||
this.notificationManager = new NotificationManager()
|
||||
this.emailManager = new EmailManager()
|
||||
this.backupManager = new BackupManager()
|
||||
this.logManager = new LogManager()
|
||||
this.abMergeManager = new AbMergeManager(this.taskManager)
|
||||
this.abMergeManager = new AbMergeManager()
|
||||
this.playbackSessionManager = new PlaybackSessionManager()
|
||||
this.podcastManager = new PodcastManager(this.watcher, this.notificationManager, this.taskManager)
|
||||
this.audioMetadataManager = new AudioMetadataMangaer(this.taskManager)
|
||||
this.podcastManager = new PodcastManager(this.watcher, this.notificationManager)
|
||||
this.audioMetadataManager = new AudioMetadataMangaer()
|
||||
this.rssFeedManager = new RssFeedManager()
|
||||
this.cronManager = new CronManager(this.podcastManager)
|
||||
|
||||
|
|
@ -95,10 +93,6 @@ class Server {
|
|||
LibraryScanner.setCancelLibraryScan(libraryId)
|
||||
}
|
||||
|
||||
getLibrariesScanning() {
|
||||
return LibraryScanner.librariesScanning
|
||||
}
|
||||
|
||||
/**
|
||||
* Initialize database, backups, logs, rss feeds, cron jobs & watcher
|
||||
* Cleanup stale/invalid data
|
||||
|
|
@ -179,7 +173,6 @@ class Server {
|
|||
// Static folder
|
||||
router.use(express.static(Path.join(global.appRoot, 'static')))
|
||||
|
||||
// router.use('/api/v1', routes) // TODO: New routes
|
||||
router.use('/api', Auth.cors, this.authMiddleware.bind(this), this.apiRouter.router)
|
||||
router.use('/hls', this.authMiddleware.bind(this), this.hlsRouter.router)
|
||||
|
||||
|
|
@ -188,7 +181,7 @@ class Server {
|
|||
Logger.info(`[Server] Requesting rss feed ${req.params.slug}`)
|
||||
this.rssFeedManager.getFeed(req, res)
|
||||
})
|
||||
router.get('/feed/:slug/cover', (req, res) => {
|
||||
router.get('/feed/:slug/cover*', (req, res) => {
|
||||
this.rssFeedManager.getFeedCover(req, res)
|
||||
})
|
||||
router.get('/feed/:slug/item/:episodeId/*', (req, res) => {
|
||||
|
|
|
|||
|
|
@ -198,8 +198,7 @@ class SocketAuthority {
|
|||
|
||||
const initialPayload = {
|
||||
userId: client.user.id,
|
||||
username: client.user.username,
|
||||
librariesScanning: this.Server.getLibrariesScanning()
|
||||
username: client.user.username
|
||||
}
|
||||
if (user.isAdminOrUp) {
|
||||
initialPayload.usersOnline = this.getUsersOnline()
|
||||
|
|
|
|||
|
|
@ -3,8 +3,10 @@ const EventEmitter = require('events')
|
|||
const Watcher = require('./libs/watcher/watcher')
|
||||
const Logger = require('./Logger')
|
||||
const LibraryScanner = require('./scanner/LibraryScanner')
|
||||
const Task = require('./objects/Task')
|
||||
const TaskManager = require('./managers/TaskManager')
|
||||
|
||||
const { filePathToPOSIX } = require('./utils/fileUtils')
|
||||
const { filePathToPOSIX, isSameOrSubPath, getFileMTimeMs } = require('./utils/fileUtils')
|
||||
|
||||
/**
|
||||
* @typedef PendingFileUpdate
|
||||
|
|
@ -22,7 +24,12 @@ class FolderWatcher extends EventEmitter {
|
|||
/** @type {PendingFileUpdate[]} */
|
||||
this.pendingFileUpdates = []
|
||||
this.pendingDelay = 4000
|
||||
/** @type {NodeJS.Timeout} */
|
||||
this.pendingTimeout = null
|
||||
/** @type {Task} */
|
||||
this.pendingTask = null
|
||||
|
||||
this.filesBeingAdded = new Set()
|
||||
|
||||
/** @type {string[]} */
|
||||
this.ignoreDirs = []
|
||||
|
|
@ -59,14 +66,13 @@ class FolderWatcher extends EventEmitter {
|
|||
})
|
||||
watcher
|
||||
.on('add', (path) => {
|
||||
this.onNewFile(library.id, path)
|
||||
this.onFileAdded(library.id, filePathToPOSIX(path))
|
||||
}).on('change', (path) => {
|
||||
// This is triggered from metadata changes, not what we want
|
||||
// this.onFileUpdated(path)
|
||||
}).on('unlink', path => {
|
||||
this.onFileRemoved(library.id, path)
|
||||
this.onFileRemoved(library.id, filePathToPOSIX(path))
|
||||
}).on('rename', (path, pathNext) => {
|
||||
this.onRename(library.id, path, pathNext)
|
||||
this.onFileRename(library.id, filePathToPOSIX(path), filePathToPOSIX(pathNext))
|
||||
}).on('error', (error) => {
|
||||
Logger.error(`[Watcher] ${error}`)
|
||||
}).on('ready', () => {
|
||||
|
|
@ -132,14 +138,31 @@ class FolderWatcher extends EventEmitter {
|
|||
return this.libraryWatchers.map(lib => lib.watcher.close())
|
||||
}
|
||||
|
||||
onNewFile(libraryId, path) {
|
||||
/**
|
||||
* Watcher detected file added
|
||||
*
|
||||
* @param {string} libraryId
|
||||
* @param {string} path
|
||||
*/
|
||||
onFileAdded(libraryId, path) {
|
||||
if (this.checkShouldIgnorePath(path)) {
|
||||
return
|
||||
}
|
||||
Logger.debug('[Watcher] File Added', path)
|
||||
this.addFileUpdate(libraryId, path, 'added')
|
||||
|
||||
if (!this.filesBeingAdded.has(path)) {
|
||||
this.filesBeingAdded.add(path)
|
||||
this.waitForFileToAdd(path)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Watcher detected file removed
|
||||
*
|
||||
* @param {string} libraryId
|
||||
* @param {string} path
|
||||
*/
|
||||
onFileRemoved(libraryId, path) {
|
||||
if (this.checkShouldIgnorePath(path)) {
|
||||
return
|
||||
|
|
@ -148,11 +171,13 @@ class FolderWatcher extends EventEmitter {
|
|||
this.addFileUpdate(libraryId, path, 'deleted')
|
||||
}
|
||||
|
||||
onFileUpdated(path) {
|
||||
Logger.debug('[Watcher] Updated File', path)
|
||||
}
|
||||
|
||||
onRename(libraryId, pathFrom, pathTo) {
|
||||
/**
|
||||
* Watcher detected file renamed
|
||||
*
|
||||
* @param {string} libraryId
|
||||
* @param {string} path
|
||||
*/
|
||||
onFileRename(libraryId, pathFrom, pathTo) {
|
||||
if (this.checkShouldIgnorePath(pathTo)) {
|
||||
return
|
||||
}
|
||||
|
|
@ -161,13 +186,41 @@ class FolderWatcher extends EventEmitter {
|
|||
}
|
||||
|
||||
/**
|
||||
* File update detected from watcher
|
||||
* Get mtimeMs from an added file every second until it is no longer changing
|
||||
* Times out after 180s
|
||||
*
|
||||
* @param {string} path
|
||||
* @param {number} [lastMTimeMs=0]
|
||||
* @param {number} [loop=0]
|
||||
*/
|
||||
async waitForFileToAdd(path, lastMTimeMs = 0, loop = 0) {
|
||||
// Safety to catch infinite loop (180s)
|
||||
if (loop >= 180) {
|
||||
Logger.warn(`[Watcher] Waiting to add file at "${path}" timeout (loop ${loop}) - proceeding`)
|
||||
return this.filesBeingAdded.delete(path)
|
||||
}
|
||||
|
||||
const mtimeMs = await getFileMTimeMs(path)
|
||||
if (mtimeMs === lastMTimeMs) {
|
||||
if (lastMTimeMs) Logger.debug(`[Watcher] File finished adding at "${path}"`)
|
||||
return this.filesBeingAdded.delete(path)
|
||||
}
|
||||
if (lastMTimeMs % 5 === 0) {
|
||||
Logger.debug(`[Watcher] Waiting to add file at "${path}". mtimeMs=${mtimeMs} lastMTimeMs=${lastMTimeMs} (loop ${loop})`)
|
||||
}
|
||||
// Wait 1 second
|
||||
await new Promise((resolve) => setTimeout(resolve, 1000))
|
||||
this.waitForFileToAdd(path, mtimeMs, ++loop)
|
||||
}
|
||||
|
||||
/**
|
||||
* Queue file update
|
||||
*
|
||||
* @param {string} libraryId
|
||||
* @param {string} path
|
||||
* @param {string} type
|
||||
*/
|
||||
addFileUpdate(libraryId, path, type) {
|
||||
path = filePathToPOSIX(path)
|
||||
if (this.pendingFilePaths.includes(path)) return
|
||||
|
||||
// Get file library
|
||||
|
|
@ -178,7 +231,7 @@ class FolderWatcher extends EventEmitter {
|
|||
}
|
||||
|
||||
// Get file folder
|
||||
const folder = libwatcher.folders.find(fold => path.startsWith(filePathToPOSIX(fold.fullPath)))
|
||||
const folder = libwatcher.folders.find(fold => isSameOrSubPath(fold.fullPath, path))
|
||||
if (!folder) {
|
||||
Logger.error(`[Watcher] New file folder not found in library "${libwatcher.name}" with path "${path}"`)
|
||||
return
|
||||
|
|
@ -202,6 +255,13 @@ class FolderWatcher extends EventEmitter {
|
|||
|
||||
Logger.debug(`[Watcher] Modified file in library "${libwatcher.name}" and folder "${folder.id}" with relPath "${relPath}"`)
|
||||
|
||||
if (!this.pendingTask) {
|
||||
const taskData = {
|
||||
libraryId,
|
||||
libraryName: libwatcher.name
|
||||
}
|
||||
this.pendingTask = TaskManager.createAndAddTask('watcher-scan', `Scanning file changes in "${libwatcher.name}"`, null, true, taskData)
|
||||
}
|
||||
this.pendingFileUpdates.push({
|
||||
path,
|
||||
relPath,
|
||||
|
|
@ -210,18 +270,32 @@ class FolderWatcher extends EventEmitter {
|
|||
type
|
||||
})
|
||||
|
||||
// Notify server of update after "pendingDelay"
|
||||
this.handlePendingFileUpdatesTimeout()
|
||||
}
|
||||
|
||||
/**
|
||||
* Wait X seconds before notifying scanner that files changed
|
||||
* reset timer if files are still copying
|
||||
*/
|
||||
handlePendingFileUpdatesTimeout() {
|
||||
clearTimeout(this.pendingTimeout)
|
||||
this.pendingTimeout = setTimeout(() => {
|
||||
// this.emit('files', this.pendingFileUpdates)
|
||||
LibraryScanner.scanFilesChanged(this.pendingFileUpdates)
|
||||
// Check that files are not still being added
|
||||
if (this.pendingFileUpdates.some(pfu => this.filesBeingAdded.has(pfu.path))) {
|
||||
Logger.debug(`[Watcher] Still waiting for pending files "${[...this.filesBeingAdded].join(', ')}"`)
|
||||
return this.handlePendingFileUpdatesTimeout()
|
||||
}
|
||||
|
||||
LibraryScanner.scanFilesChanged(this.pendingFileUpdates, this.pendingTask)
|
||||
this.pendingTask = null
|
||||
this.pendingFileUpdates = []
|
||||
this.filesBeingAdded.clear()
|
||||
}, this.pendingDelay)
|
||||
}
|
||||
|
||||
checkShouldIgnorePath(path) {
|
||||
return !!this.ignoreDirs.find(dirpath => {
|
||||
return filePathToPOSIX(path).startsWith(dirpath)
|
||||
return isSameOrSubPath(dirpath, path)
|
||||
})
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -67,30 +67,10 @@ class AuthorController {
|
|||
const payload = req.body
|
||||
let hasUpdated = false
|
||||
|
||||
// Updating/removing cover image
|
||||
if (payload.imagePath !== undefined && payload.imagePath !== req.author.imagePath) {
|
||||
if (!payload.imagePath && req.author.imagePath) { // If removing image then remove file
|
||||
await CacheManager.purgeImageCache(req.author.id) // Purge cache
|
||||
await CoverManager.removeFile(req.author.imagePath)
|
||||
} else if (payload.imagePath.startsWith('http')) { // Check if image path is a url
|
||||
const imageData = await AuthorFinder.saveAuthorImage(req.author.id, payload.imagePath)
|
||||
if (imageData) {
|
||||
if (req.author.imagePath) {
|
||||
await CacheManager.purgeImageCache(req.author.id) // Purge cache
|
||||
}
|
||||
payload.imagePath = imageData.path
|
||||
hasUpdated = true
|
||||
}
|
||||
} else if (payload.imagePath && payload.imagePath !== req.author.imagePath) { // Changing image path locally
|
||||
if (!await fs.pathExists(payload.imagePath)) { // Make sure image path exists
|
||||
Logger.error(`[AuthorController] Image path does not exist: "${payload.imagePath}"`)
|
||||
return res.status(400).send('Author image path does not exist')
|
||||
}
|
||||
|
||||
if (req.author.imagePath) {
|
||||
await CacheManager.purgeImageCache(req.author.id) // Purge cache
|
||||
}
|
||||
}
|
||||
// author imagePath must be set through other endpoints as of v2.4.5
|
||||
if (payload.imagePath !== undefined) {
|
||||
Logger.warn(`[AuthorController] Updating local author imagePath is not supported`)
|
||||
delete payload.imagePath
|
||||
}
|
||||
|
||||
const authorNameUpdate = payload.name !== undefined && payload.name !== req.author.name
|
||||
|
|
@ -131,7 +111,7 @@ class AuthorController {
|
|||
Database.removeAuthorFromFilterData(req.author.libraryId, req.author.id)
|
||||
|
||||
// Send updated num books for merged author
|
||||
const numBooks = await Database.libraryItemModel.getForAuthor(existingAuthor).length
|
||||
const numBooks = (await Database.libraryItemModel.getForAuthor(existingAuthor)).length
|
||||
SocketAuthority.emitter('author_updated', existingAuthor.toJSONExpanded(numBooks))
|
||||
|
||||
res.json({
|
||||
|
|
@ -191,6 +171,75 @@ class AuthorController {
|
|||
res.sendStatus(200)
|
||||
}
|
||||
|
||||
/**
|
||||
* POST: /api/authors/:id/image
|
||||
* Upload author image from web URL
|
||||
*
|
||||
* @param {import('express').Request} req
|
||||
* @param {import('express').Response} res
|
||||
*/
|
||||
async uploadImage(req, res) {
|
||||
if (!req.user.canUpload) {
|
||||
Logger.warn('User attempted to upload an image without permission', req.user)
|
||||
return res.sendStatus(403)
|
||||
}
|
||||
if (!req.body.url) {
|
||||
Logger.error(`[AuthorController] Invalid request payload. 'url' not in request body`)
|
||||
return res.status(400).send(`Invalid request payload. 'url' not in request body`)
|
||||
}
|
||||
if (!req.body.url.startsWith?.('http:') && !req.body.url.startsWith?.('https:')) {
|
||||
Logger.error(`[AuthorController] Invalid request payload. Invalid url "${req.body.url}"`)
|
||||
return res.status(400).send(`Invalid request payload. Invalid url "${req.body.url}"`)
|
||||
}
|
||||
|
||||
Logger.debug(`[AuthorController] Requesting download author image from url "${req.body.url}"`)
|
||||
const result = await AuthorFinder.saveAuthorImage(req.author.id, req.body.url)
|
||||
|
||||
if (result?.error) {
|
||||
return res.status(400).send(result.error)
|
||||
} else if (!result?.path) {
|
||||
return res.status(500).send('Unknown error occurred')
|
||||
}
|
||||
|
||||
if (req.author.imagePath) {
|
||||
await CacheManager.purgeImageCache(req.author.id) // Purge cache
|
||||
}
|
||||
|
||||
req.author.imagePath = result.path
|
||||
await Database.authorModel.updateFromOld(req.author)
|
||||
|
||||
const numBooks = (await Database.libraryItemModel.getForAuthor(req.author)).length
|
||||
SocketAuthority.emitter('author_updated', req.author.toJSONExpanded(numBooks))
|
||||
res.json({
|
||||
author: req.author.toJSON()
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* DELETE: /api/authors/:id/image
|
||||
* Remove author image & delete image file
|
||||
*
|
||||
* @param {import('express').Request} req
|
||||
* @param {import('express').Response} res
|
||||
*/
|
||||
async deleteImage(req, res) {
|
||||
if (!req.author.imagePath) {
|
||||
Logger.error(`[AuthorController] Author "${req.author.imagePath}" has no imagePath set`)
|
||||
return res.status(400).send('Author has no image path set')
|
||||
}
|
||||
Logger.info(`[AuthorController] Removing image for author "${req.author.name}" at "${req.author.imagePath}"`)
|
||||
await CacheManager.purgeImageCache(req.author.id) // Purge cache
|
||||
await CoverManager.removeFile(req.author.imagePath)
|
||||
req.author.imagePath = null
|
||||
await Database.authorModel.updateFromOld(req.author)
|
||||
|
||||
const numBooks = (await Database.libraryItemModel.getForAuthor(req.author)).length
|
||||
SocketAuthority.emitter('author_updated', req.author.toJSONExpanded(numBooks))
|
||||
res.json({
|
||||
author: req.author.toJSON()
|
||||
})
|
||||
}
|
||||
|
||||
async match(req, res) {
|
||||
let authorData = null
|
||||
const region = req.body.region || 'us'
|
||||
|
|
@ -215,7 +264,7 @@ class AuthorController {
|
|||
await CacheManager.purgeImageCache(req.author.id)
|
||||
|
||||
const imageData = await AuthorFinder.saveAuthorImage(req.author.id, authorData.image)
|
||||
if (imageData) {
|
||||
if (imageData?.path) {
|
||||
req.author.imagePath = imageData.path
|
||||
hasUpdates = true
|
||||
}
|
||||
|
|
@ -231,7 +280,7 @@ class AuthorController {
|
|||
|
||||
await Database.updateAuthor(req.author)
|
||||
|
||||
const numBooks = await Database.libraryItemModel.getForAuthor(req.author).length
|
||||
const numBooks = (await Database.libraryItemModel.getForAuthor(req.author)).length
|
||||
SocketAuthority.emitter('author_updated', req.author.toJSONExpanded(numBooks))
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -51,32 +51,45 @@ class EmailController {
|
|||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Send ebook to device
|
||||
* User must have access to device and library item
|
||||
*
|
||||
* @param {import('express').Request} req
|
||||
* @param {import('express').Response} res
|
||||
*/
|
||||
async sendEBookToDevice(req, res) {
|
||||
Logger.debug(`[EmailController] Send ebook to device request for libraryItemId=${req.body.libraryItemId}, deviceName=${req.body.deviceName}`)
|
||||
Logger.debug(`[EmailController] Send ebook to device requested by user "${req.user.username}" for libraryItemId=${req.body.libraryItemId}, deviceName=${req.body.deviceName}`)
|
||||
|
||||
const device = Database.emailSettings.getEReaderDevice(req.body.deviceName)
|
||||
if (!device) {
|
||||
return res.status(404).send('Ereader device not found')
|
||||
}
|
||||
|
||||
// Check user has access to device
|
||||
if (!Database.emailSettings.checkUserCanAccessDevice(device, req.user)) {
|
||||
return res.sendStatus(403)
|
||||
}
|
||||
|
||||
const libraryItem = await Database.libraryItemModel.getOldById(req.body.libraryItemId)
|
||||
if (!libraryItem) {
|
||||
return res.status(404).send('Library item not found')
|
||||
}
|
||||
|
||||
// Check user has access to library item
|
||||
if (!req.user.checkCanAccessLibraryItem(libraryItem)) {
|
||||
return res.sendStatus(403)
|
||||
}
|
||||
|
||||
const ebookFile = libraryItem.media.ebookFile
|
||||
if (!ebookFile) {
|
||||
return res.status(404).send('EBook file not found')
|
||||
}
|
||||
|
||||
const device = Database.emailSettings.getEReaderDevice(req.body.deviceName)
|
||||
if (!device) {
|
||||
return res.status(404).send('E-reader device not found')
|
||||
return res.status(404).send('Ebook file not found')
|
||||
}
|
||||
|
||||
this.emailManager.sendEBookToDevice(ebookFile, device, res)
|
||||
}
|
||||
|
||||
middleware(req, res, next) {
|
||||
adminMiddleware(req, res, next) {
|
||||
if (!req.user.isAdminOrUp) {
|
||||
return res.sendStatus(404)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -9,7 +9,8 @@ const libraryItemsBookFilters = require('../utils/queries/libraryItemsBookFilter
|
|||
const libraryItemFilters = require('../utils/queries/libraryItemFilters')
|
||||
const seriesFilters = require('../utils/queries/seriesFilters')
|
||||
const fileUtils = require('../utils/fileUtils')
|
||||
const { sort, createNewSortInstance } = require('../libs/fastSort')
|
||||
const { asciiOnlyToLowerCase } = require('../utils/index')
|
||||
const { createNewSortInstance } = require('../libs/fastSort')
|
||||
const naturalSort = createNewSortInstance({
|
||||
comparer: new Intl.Collator(undefined, { numeric: true, sensitivity: 'base' }).compare
|
||||
})
|
||||
|
|
@ -555,7 +556,7 @@ class LibraryController {
|
|||
return res.status(400).send('No query string')
|
||||
}
|
||||
const limit = req.query.limit && !isNaN(req.query.limit) ? Number(req.query.limit) : 12
|
||||
const query = req.query.q.trim().toLowerCase()
|
||||
const query = asciiOnlyToLowerCase(req.query.q.trim())
|
||||
|
||||
const matches = await libraryItemFilters.search(req.user, req.library, query, limit)
|
||||
res.json(matches)
|
||||
|
|
@ -620,7 +621,7 @@ class LibraryController {
|
|||
model: Database.bookModel,
|
||||
attributes: ['id', 'tags', 'explicit'],
|
||||
where: bookWhere,
|
||||
required: false,
|
||||
required: !req.user.isAdminOrUp, // Only show authors with 0 books for admin users or up
|
||||
through: {
|
||||
attributes: []
|
||||
}
|
||||
|
|
@ -774,6 +775,13 @@ class LibraryController {
|
|||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* GET: /api/libraries/:id/matchall
|
||||
* Quick match all library items. Book libraries only.
|
||||
*
|
||||
* @param {import('express').Request} req
|
||||
* @param {import('express').Response} res
|
||||
*/
|
||||
async matchAll(req, res) {
|
||||
if (!req.user.isAdminOrUp) {
|
||||
Logger.error(`[LibraryController] Non-root user attempted to match library items`, req.user)
|
||||
|
|
@ -783,7 +791,14 @@ class LibraryController {
|
|||
res.sendStatus(200)
|
||||
}
|
||||
|
||||
// POST: api/libraries/:id/scan
|
||||
/**
|
||||
* POST: /api/libraries/:id/scan
|
||||
* Optional query:
|
||||
* ?force=1
|
||||
*
|
||||
* @param {import('express').Request} req
|
||||
* @param {import('express').Response} res
|
||||
*/
|
||||
async scan(req, res) {
|
||||
if (!req.user.isAdminOrUp) {
|
||||
Logger.error(`[LibraryController] Non-root user attempted to scan library`, req.user)
|
||||
|
|
@ -791,7 +806,8 @@ class LibraryController {
|
|||
}
|
||||
res.sendStatus(200)
|
||||
|
||||
await LibraryScanner.scan(req.library)
|
||||
const forceRescan = req.query.force === '1'
|
||||
await LibraryScanner.scan(req.library, forceRescan)
|
||||
|
||||
await Database.resetLibraryIssuesFilterData(req.library.id)
|
||||
Logger.info('[LibraryController] Scan complete')
|
||||
|
|
@ -845,6 +861,56 @@ class LibraryController {
|
|||
res.send(opmlText)
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove all metadata.json or metadata.abs files in library item folders
|
||||
*
|
||||
* @param {import('express').Request} req
|
||||
* @param {import('express').Response} res
|
||||
*/
|
||||
async removeAllMetadataFiles(req, res) {
|
||||
if (!req.user.isAdminOrUp) {
|
||||
Logger.error(`[LibraryController] Non-admin user attempted to remove all metadata files`, req.user)
|
||||
return res.sendStatus(403)
|
||||
}
|
||||
|
||||
const fileExt = req.query.ext === 'abs' ? 'abs' : 'json'
|
||||
const metadataFilename = `metadata.${fileExt}`
|
||||
const libraryItemsWithMetadata = await Database.libraryItemModel.findAll({
|
||||
attributes: ['id', 'libraryFiles'],
|
||||
where: [
|
||||
{
|
||||
libraryId: req.library.id
|
||||
},
|
||||
Sequelize.where(Sequelize.literal(`(SELECT count(*) FROM json_each(libraryFiles) WHERE json_valid(libraryFiles) AND json_extract(json_each.value, "$.metadata.filename") = "${metadataFilename}")`), {
|
||||
[Sequelize.Op.gte]: 1
|
||||
})
|
||||
]
|
||||
})
|
||||
if (!libraryItemsWithMetadata.length) {
|
||||
Logger.info(`[LibraryController] No ${metadataFilename} files found to remove`)
|
||||
return res.json({
|
||||
found: 0
|
||||
})
|
||||
}
|
||||
|
||||
Logger.info(`[LibraryController] Found ${libraryItemsWithMetadata.length} ${metadataFilename} files to remove`)
|
||||
|
||||
let numRemoved = 0
|
||||
for (const libraryItem of libraryItemsWithMetadata) {
|
||||
const metadataFilepath = libraryItem.libraryFiles.find(lf => lf.metadata.filename === metadataFilename)?.metadata.path
|
||||
if (!metadataFilepath) continue
|
||||
Logger.debug(`[LibraryController] Removing file "${metadataFilepath}"`)
|
||||
if ((await fileUtils.removeFile(metadataFilepath))) {
|
||||
numRemoved++
|
||||
}
|
||||
}
|
||||
|
||||
res.json({
|
||||
found: libraryItemsWithMetadata.length,
|
||||
removed: numRemoved
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Middleware that is not using libraryItems from memory
|
||||
* @param {import('express').Request} req
|
||||
|
|
|
|||
|
|
@ -85,12 +85,31 @@ class LibraryItemController {
|
|||
res.sendStatus(200)
|
||||
}
|
||||
|
||||
/**
|
||||
* GET: /api/items/:id/download
|
||||
* Download library item. Zip file if multiple files.
|
||||
*
|
||||
* @param {import('express').Request} req
|
||||
* @param {import('express').Response} res
|
||||
*/
|
||||
download(req, res) {
|
||||
if (!req.user.canDownload) {
|
||||
Logger.warn('User attempted to download without permission', req.user)
|
||||
return res.sendStatus(403)
|
||||
}
|
||||
|
||||
// If library item is a single file in root dir then no need to zip
|
||||
if (req.libraryItem.isFile) {
|
||||
// Express does not set the correct mimetype for m4b files so use our defined mimetypes if available
|
||||
const audioMimeType = getAudioMimeTypeFromExtname(Path.extname(req.libraryItem.path))
|
||||
if (audioMimeType) {
|
||||
res.setHeader('Content-Type', audioMimeType)
|
||||
}
|
||||
|
||||
res.download(req.libraryItem.path, req.libraryItem.relPath)
|
||||
return
|
||||
}
|
||||
|
||||
const libraryItemPath = req.libraryItem.path
|
||||
const itemTitle = req.libraryItem.media.metadata.title
|
||||
Logger.info(`[LibraryItemController] User "${req.user.username}" requested download for item "${itemTitle}" at "${libraryItemPath}"`)
|
||||
|
|
@ -163,22 +182,22 @@ class LibraryItemController {
|
|||
return res.sendStatus(403)
|
||||
}
|
||||
|
||||
var libraryItem = req.libraryItem
|
||||
let libraryItem = req.libraryItem
|
||||
|
||||
var result = null
|
||||
if (req.body && req.body.url) {
|
||||
let result = null
|
||||
if (req.body?.url) {
|
||||
Logger.debug(`[LibraryItemController] Requesting download cover from url "${req.body.url}"`)
|
||||
result = await CoverManager.downloadCoverFromUrl(libraryItem, req.body.url)
|
||||
} else if (req.files && req.files.cover) {
|
||||
} else if (req.files?.cover) {
|
||||
Logger.debug(`[LibraryItemController] Handling uploaded cover`)
|
||||
result = await CoverManager.uploadCover(libraryItem, req.files.cover)
|
||||
} else {
|
||||
return res.status(400).send('Invalid request no file or url')
|
||||
}
|
||||
|
||||
if (result && result.error) {
|
||||
if (result?.error) {
|
||||
return res.status(400).send(result.error)
|
||||
} else if (!result || !result.cover) {
|
||||
} else if (!result?.cover) {
|
||||
return res.status(500).send('Unknown error occurred')
|
||||
}
|
||||
|
||||
|
|
@ -259,7 +278,6 @@ class LibraryItemController {
|
|||
|
||||
// Check if library item media has a cover path
|
||||
if (!libraryItem.media.coverPath || !await fs.pathExists(libraryItem.media.coverPath)) {
|
||||
Logger.debug(`[LibraryItemController] getCover: Library item "${req.params.id}" has no cover path`)
|
||||
return res.sendStatus(404)
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -9,6 +9,8 @@ const libraryItemFilters = require('../utils/queries/libraryItemFilters')
|
|||
const patternValidation = require('../libs/nodeCron/pattern-validation')
|
||||
const { isObject, getTitleIgnorePrefix } = require('../utils/index')
|
||||
|
||||
const TaskManager = require('../managers/TaskManager')
|
||||
|
||||
//
|
||||
// This is a controller for routes that don't have a home yet :(
|
||||
//
|
||||
|
|
@ -102,7 +104,7 @@ class MiscController {
|
|||
const includeArray = (req.query.include || '').split(',')
|
||||
|
||||
const data = {
|
||||
tasks: this.taskManager.tasks.map(t => t.toJSON())
|
||||
tasks: TaskManager.tasks.map(t => t.toJSON())
|
||||
}
|
||||
|
||||
if (includeArray.includes('queue')) {
|
||||
|
|
@ -526,6 +528,54 @@ class MiscController {
|
|||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* POST: /api/watcher/update
|
||||
* Update a watch path
|
||||
* Req.body { libraryId, path, type, [oldPath] }
|
||||
* type = add, unlink, rename
|
||||
* oldPath = required only for rename
|
||||
* @this import('../routers/ApiRouter')
|
||||
*
|
||||
* @param {import('express').Request} req
|
||||
* @param {import('express').Response} res
|
||||
*/
|
||||
updateWatchedPath(req, res) {
|
||||
if (!req.user.isAdminOrUp) {
|
||||
Logger.error(`[MiscController] Non-admin user attempted to updateWatchedPath`)
|
||||
return res.sendStatus(404)
|
||||
}
|
||||
|
||||
const libraryId = req.body.libraryId
|
||||
const path = req.body.path
|
||||
const type = req.body.type
|
||||
if (!libraryId || !path || !type) {
|
||||
Logger.error(`[MiscController] Invalid request body for updateWatchedPath. libraryId: "${libraryId}", path: "${path}", type: "${type}"`)
|
||||
return res.sendStatus(400)
|
||||
}
|
||||
|
||||
switch (type) {
|
||||
case 'add':
|
||||
this.watcher.onFileAdded(libraryId, path)
|
||||
break;
|
||||
case 'unlink':
|
||||
this.watcher.onFileRemoved(libraryId, path)
|
||||
break;
|
||||
case 'rename':
|
||||
const oldPath = req.body.oldPath
|
||||
if (!oldPath) {
|
||||
Logger.error(`[MiscController] Invalid request body for updateWatchedPath. oldPath is required for rename.`)
|
||||
return res.sendStatus(400)
|
||||
}
|
||||
this.watcher.onFileRename(libraryId, oldPath, path)
|
||||
break;
|
||||
default:
|
||||
Logger.error(`[MiscController] Invalid type for updateWatchedPath. type: "${type}"`)
|
||||
return res.sendStatus(400)
|
||||
}
|
||||
|
||||
res.sendStatus(200)
|
||||
}
|
||||
|
||||
validateCronExpression(req, res) {
|
||||
const expression = req.body.expression
|
||||
if (!expression) {
|
||||
|
|
|
|||
|
|
@ -184,10 +184,9 @@ class PodcastController {
|
|||
Logger.error(`[PodcastController] Non-admin user attempted to download episodes`, req.user)
|
||||
return res.sendStatus(403)
|
||||
}
|
||||
var libraryItem = req.libraryItem
|
||||
|
||||
var episodes = req.body
|
||||
if (!episodes || !episodes.length) {
|
||||
const libraryItem = req.libraryItem
|
||||
const episodes = req.body
|
||||
if (!episodes?.length) {
|
||||
return res.sendStatus(400)
|
||||
}
|
||||
|
||||
|
|
@ -286,7 +285,7 @@ class PodcastController {
|
|||
const numItems = pmi.playlist.playlistMediaItems.length - 1
|
||||
|
||||
if (!numItems) {
|
||||
Logger.info(`[PodcastController] Playlist "${playlist.name}" has no more items - removing it`)
|
||||
Logger.info(`[PodcastController] Playlist "${pmi.playlist.name}" has no more items - removing it`)
|
||||
const jsonExpanded = await pmi.playlist.getOldJsonExpanded()
|
||||
SocketAuthority.clientEmitter(pmi.playlist.userId, 'playlist_removed', jsonExpanded)
|
||||
await pmi.playlist.destroy()
|
||||
|
|
|
|||
|
|
@ -26,7 +26,7 @@ class SearchController {
|
|||
|
||||
let results = null
|
||||
if (podcast) results = await PodcastFinder.findCovers(query.title)
|
||||
else results = await BookFinder.findCovers(query.provider || 'google', query.title, query.author || null)
|
||||
else results = await BookFinder.findCovers(query.provider || 'google', query.title, query.author || '')
|
||||
res.json({
|
||||
results
|
||||
})
|
||||
|
|
|
|||
|
|
@ -115,6 +115,13 @@ class UserController {
|
|||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* PATCH: /api/users/:id
|
||||
* Update user
|
||||
*
|
||||
* @param {import('express').Request} req
|
||||
* @param {import('express').Response} res
|
||||
*/
|
||||
async update(req, res) {
|
||||
const user = req.reqUser
|
||||
|
||||
|
|
@ -126,6 +133,7 @@ class UserController {
|
|||
var account = req.body
|
||||
var shouldUpdateToken = false
|
||||
|
||||
// When changing username create a new API token
|
||||
if (account.username !== undefined && account.username !== user.username) {
|
||||
const usernameExists = await Database.userModel.getUserByUsername(account.username)
|
||||
if (usernameExists) {
|
||||
|
|
|
|||
|
|
@ -1,16 +0,0 @@
|
|||
const itemDb = require('../db/item.db')
|
||||
|
||||
const getLibraryItem = async (req, res) => {
|
||||
let libraryItem = null
|
||||
if (req.query.expanded == 1) {
|
||||
libraryItem = await itemDb.getLibraryItemExpanded(req.params.id)
|
||||
} else {
|
||||
libraryItem = await itemDb.getLibraryItemMinified(req.params.id)
|
||||
}
|
||||
|
||||
res.json(libraryItem)
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
getLibraryItem
|
||||
}
|
||||
|
|
@ -1,80 +0,0 @@
|
|||
/**
|
||||
* TODO: Unused for testing
|
||||
*/
|
||||
const { Sequelize } = require('sequelize')
|
||||
const Database = require('../Database')
|
||||
|
||||
const getLibraryItemMinified = (libraryItemId) => {
|
||||
return Database.libraryItemModel.findByPk(libraryItemId, {
|
||||
include: [
|
||||
{
|
||||
model: Database.bookModel,
|
||||
attributes: [
|
||||
'id', 'title', 'subtitle', 'publishedYear', 'publishedDate', 'publisher', 'description', 'isbn', 'asin', 'language', 'explicit', 'narrators', 'coverPath', 'genres', 'tags'
|
||||
],
|
||||
include: [
|
||||
{
|
||||
model: Database.authorModel,
|
||||
attributes: ['id', 'name'],
|
||||
through: {
|
||||
attributes: []
|
||||
}
|
||||
},
|
||||
{
|
||||
model: Database.seriesModel,
|
||||
attributes: ['id', 'name'],
|
||||
through: {
|
||||
attributes: ['sequence']
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
model: Database.podcastModel,
|
||||
attributes: [
|
||||
'id', 'title', 'author', 'releaseDate', 'feedURL', 'imageURL', 'description', 'itunesPageURL', 'itunesId', 'itunesArtistId', 'language', 'podcastType', 'explicit', 'autoDownloadEpisodes', 'genres', 'tags',
|
||||
[Sequelize.literal('(SELECT COUNT(*) FROM "podcastEpisodes" WHERE "podcastEpisodes"."podcastId" = podcast.id)'), 'numPodcastEpisodes']
|
||||
]
|
||||
}
|
||||
]
|
||||
})
|
||||
}
|
||||
|
||||
const getLibraryItemExpanded = (libraryItemId) => {
|
||||
return Database.libraryItemModel.findByPk(libraryItemId, {
|
||||
include: [
|
||||
{
|
||||
model: Database.bookModel,
|
||||
include: [
|
||||
{
|
||||
model: Database.authorModel,
|
||||
through: {
|
||||
attributes: []
|
||||
}
|
||||
},
|
||||
{
|
||||
model: Database.seriesModel,
|
||||
through: {
|
||||
attributes: ['sequence']
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
model: Database.podcastModel,
|
||||
include: [
|
||||
{
|
||||
model: Database.podcastEpisodeModel
|
||||
}
|
||||
]
|
||||
},
|
||||
'libraryFolder',
|
||||
'library'
|
||||
]
|
||||
})
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
getLibraryItemMinified,
|
||||
getLibraryItemExpanded
|
||||
}
|
||||
|
|
@ -3,20 +3,13 @@ const Logger = require('../Logger')
|
|||
const Path = require('path')
|
||||
const Audnexus = require('../providers/Audnexus')
|
||||
|
||||
const { downloadFile } = require('../utils/fileUtils')
|
||||
const { downloadImageFile } = require('../utils/fileUtils')
|
||||
|
||||
class AuthorFinder {
|
||||
constructor() {
|
||||
this.audnexus = new Audnexus()
|
||||
}
|
||||
|
||||
async downloadImage(url, outputPath) {
|
||||
return downloadFile(url, outputPath).then(() => true).catch((error) => {
|
||||
Logger.error('[AuthorFinder] Failed to download author image', error)
|
||||
return null
|
||||
})
|
||||
}
|
||||
|
||||
findAuthorByASIN(asin, region) {
|
||||
if (!asin) return null
|
||||
return this.audnexus.findAuthorByASIN(asin, region)
|
||||
|
|
@ -33,28 +26,36 @@ class AuthorFinder {
|
|||
return author
|
||||
}
|
||||
|
||||
/**
|
||||
* Download author image from url and save in authors folder
|
||||
*
|
||||
* @param {string} authorId
|
||||
* @param {string} url
|
||||
* @returns {Promise<{path:string, error:string}>}
|
||||
*/
|
||||
async saveAuthorImage(authorId, url) {
|
||||
var authorDir = Path.join(global.MetadataPath, 'authors')
|
||||
var relAuthorDir = Path.posix.join('/metadata', 'authors')
|
||||
const authorDir = Path.join(global.MetadataPath, 'authors')
|
||||
|
||||
if (!await fs.pathExists(authorDir)) {
|
||||
await fs.ensureDir(authorDir)
|
||||
}
|
||||
|
||||
var imageExtension = url.toLowerCase().split('.').pop()
|
||||
var ext = imageExtension === 'png' ? 'png' : 'jpg'
|
||||
var filename = authorId + '.' + ext
|
||||
var outputPath = Path.posix.join(authorDir, filename)
|
||||
var relPath = Path.posix.join(relAuthorDir, filename)
|
||||
const imageExtension = url.toLowerCase().split('.').pop()
|
||||
const ext = imageExtension === 'png' ? 'png' : 'jpg'
|
||||
const filename = authorId + '.' + ext
|
||||
const outputPath = Path.posix.join(authorDir, filename)
|
||||
|
||||
var success = await this.downloadImage(url, outputPath)
|
||||
if (!success) {
|
||||
return null
|
||||
}
|
||||
return {
|
||||
path: outputPath,
|
||||
relPath
|
||||
}
|
||||
return downloadImageFile(url, outputPath).then(() => {
|
||||
return {
|
||||
path: outputPath
|
||||
}
|
||||
}).catch((err) => {
|
||||
let errorMsg = err.message || 'Unknown error'
|
||||
Logger.error(`[AuthorFinder] Download image file failed for "${url}"`, errorMsg)
|
||||
return {
|
||||
error: errorMsg
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
module.exports = new AuthorFinder()
|
||||
|
|
@ -6,7 +6,7 @@ const Audnexus = require('../providers/Audnexus')
|
|||
const FantLab = require('../providers/FantLab')
|
||||
const AudiobookCovers = require('../providers/AudiobookCovers')
|
||||
const Logger = require('../Logger')
|
||||
const { levenshteinDistance } = require('../utils/index')
|
||||
const { levenshteinDistance, escapeRegExp } = require('../utils/index')
|
||||
|
||||
class BookFinder {
|
||||
constructor() {
|
||||
|
|
@ -59,12 +59,17 @@ class BookFinder {
|
|||
|
||||
// Remove single quotes (i.e. "Ender's Game" becomes "Enders Game")
|
||||
cleaned = cleaned.replace(/'/g, '')
|
||||
return this.replaceAccentedChars(cleaned)
|
||||
return this.replaceAccentedChars(cleaned).toLowerCase()
|
||||
}
|
||||
|
||||
cleanAuthorForCompares(author) {
|
||||
if (!author) return ''
|
||||
return this.replaceAccentedChars(author)
|
||||
let cleanAuthor = this.replaceAccentedChars(author).toLowerCase()
|
||||
// separate initials
|
||||
cleanAuthor = cleanAuthor.replace(/([a-z])\.([a-z])/g, '$1. $2')
|
||||
// remove middle initials
|
||||
cleanAuthor = cleanAuthor.replace(/(?<=\w\w)(\s+[a-z]\.?)+(?=\s+\w\w)/g, '')
|
||||
return cleanAuthor
|
||||
}
|
||||
|
||||
filterSearchResults(books, title, author, maxTitleDistance, maxAuthorDistance) {
|
||||
|
|
@ -136,6 +141,10 @@ class BookFinder {
|
|||
if (!booksFiltered.length && books.length) {
|
||||
if (this.verbose) Logger.debug(`Search has ${books.length} matches, but no close title matches`)
|
||||
}
|
||||
booksFiltered.sort((a, b) => {
|
||||
return a.totalDistance - b.totalDistance
|
||||
})
|
||||
|
||||
return booksFiltered
|
||||
}
|
||||
|
||||
|
|
@ -179,35 +188,152 @@ class BookFinder {
|
|||
return books
|
||||
}
|
||||
|
||||
addTitleCandidate(title, candidates) {
|
||||
// Main variant
|
||||
const cleanTitle = this.cleanTitleForCompares(title).trim()
|
||||
if (!cleanTitle) return
|
||||
candidates.add(cleanTitle)
|
||||
static TitleCandidates = class {
|
||||
|
||||
let candidate = cleanTitle
|
||||
constructor(bookFinder, cleanAuthor) {
|
||||
this.bookFinder = bookFinder
|
||||
this.candidates = new Set()
|
||||
this.cleanAuthor = cleanAuthor
|
||||
this.priorities = {}
|
||||
this.positions = {}
|
||||
}
|
||||
|
||||
// Remove subtitle
|
||||
candidate = candidate.replace(/([,:;_]| by ).*/g, "").trim()
|
||||
if (candidate)
|
||||
candidates.add(candidate)
|
||||
add(title, position = 0) {
|
||||
// if title contains the author, remove it
|
||||
if (this.cleanAuthor) {
|
||||
const authorRe = new RegExp(`(^| | by |)${escapeRegExp(this.cleanAuthor)}(?= |$)`, "g")
|
||||
title = this.bookFinder.cleanAuthorForCompares(title).replace(authorRe, '').trim()
|
||||
}
|
||||
|
||||
// Remove preceding/trailing numbers
|
||||
candidate = candidate.replace(/^\d+ | \d+$/g, "").trim()
|
||||
if (candidate)
|
||||
candidates.add(candidate)
|
||||
const titleTransformers = [
|
||||
[/([,:;_]| by ).*/g, ''], // Remove subtitle
|
||||
[/(^| )\d+k(bps)?( |$)/, ' '], // Remove bitrate
|
||||
[/ (2nd|3rd|\d+th)\s+ed(\.|ition)?/g, ''], // Remove edition
|
||||
[/(^| |\.)(m4b|m4a|mp3)( |$)/g, ''], // Remove file-type
|
||||
[/ a novel.*$/g, ''], // Remove "a novel"
|
||||
[/^\d+ | \d+$/g, ''], // Remove preceding/trailing numbers
|
||||
]
|
||||
|
||||
// Remove bitrate
|
||||
candidate = candidate.replace(/(^| )\d+k(bps)?( |$)/, " ").trim()
|
||||
if (candidate)
|
||||
candidates.add(candidate)
|
||||
// Main variant
|
||||
const cleanTitle = this.bookFinder.cleanTitleForCompares(title).trim()
|
||||
if (!cleanTitle) return
|
||||
this.candidates.add(cleanTitle)
|
||||
this.priorities[cleanTitle] = 0
|
||||
this.positions[cleanTitle] = position
|
||||
|
||||
// Remove edition
|
||||
candidate = candidate.replace(/ (2nd|3rd|\d+th)\s+ed(\.|ition)?/, "").trim()
|
||||
if (candidate)
|
||||
candidates.add(candidate)
|
||||
let candidate = cleanTitle
|
||||
|
||||
for (const transformer of titleTransformers)
|
||||
candidate = candidate.replace(transformer[0], transformer[1]).trim()
|
||||
|
||||
if (candidate != cleanTitle) {
|
||||
if (candidate) {
|
||||
this.candidates.add(candidate)
|
||||
this.priorities[candidate] = 0
|
||||
this.positions[candidate] = position
|
||||
}
|
||||
this.priorities[cleanTitle] = 1
|
||||
}
|
||||
}
|
||||
|
||||
get size() {
|
||||
return this.candidates.size
|
||||
}
|
||||
|
||||
getCandidates() {
|
||||
var candidates = [...this.candidates]
|
||||
candidates.sort((a, b) => {
|
||||
// Candidates that include the author are likely low quality
|
||||
const includesAuthorDiff = !b.includes(this.cleanAuthor) - !a.includes(this.cleanAuthor)
|
||||
if (includesAuthorDiff) return includesAuthorDiff
|
||||
// Candidates that include only digits are also likely low quality
|
||||
const onlyDigits = /^\d+$/
|
||||
const includesOnlyDigitsDiff = !onlyDigits.test(b) - !onlyDigits.test(a)
|
||||
if (includesOnlyDigitsDiff) return includesOnlyDigitsDiff
|
||||
// transformed candidates receive higher priority
|
||||
const priorityDiff = this.priorities[a] - this.priorities[b]
|
||||
if (priorityDiff) return priorityDiff
|
||||
// if same priorirty, prefer candidates that are closer to the beginning (e.g. titles before subtitles)
|
||||
const positionDiff = this.positions[a] - this.positions[b]
|
||||
if (positionDiff) return positionDiff
|
||||
// Start with longer candidaets, as they are likely more specific
|
||||
const lengthDiff = b.length - a.length
|
||||
if (lengthDiff) return lengthDiff
|
||||
return b.localeCompare(a)
|
||||
})
|
||||
Logger.debug(`[${this.constructor.name}] Found ${candidates.length} fuzzy title candidates`)
|
||||
Logger.debug(candidates)
|
||||
return candidates
|
||||
}
|
||||
|
||||
delete(title) {
|
||||
return this.candidates.delete(title)
|
||||
}
|
||||
}
|
||||
|
||||
static AuthorCandidates = class {
|
||||
constructor(bookFinder, cleanAuthor) {
|
||||
this.bookFinder = bookFinder
|
||||
this.candidates = new Set()
|
||||
this.cleanAuthor = cleanAuthor
|
||||
if (cleanAuthor) this.candidates.add(cleanAuthor)
|
||||
}
|
||||
|
||||
validateAuthor(name, region = '', maxLevenshtein = 2) {
|
||||
return this.bookFinder.audnexus.authorASINsRequest(name, region).then((asins) => {
|
||||
for (const [i, asin] of asins.entries()) {
|
||||
if (i > 10) break
|
||||
let cleanName = this.bookFinder.cleanAuthorForCompares(asin.name)
|
||||
if (!cleanName) continue
|
||||
if (cleanName.includes(name)) return name
|
||||
if (name.includes(cleanName)) return cleanName
|
||||
if (levenshteinDistance(cleanName, name) <= maxLevenshtein) return cleanName
|
||||
}
|
||||
return ''
|
||||
})
|
||||
}
|
||||
|
||||
add(author) {
|
||||
const cleanAuthor = this.bookFinder.cleanAuthorForCompares(author).trim()
|
||||
if (!cleanAuthor) return
|
||||
this.candidates.add(cleanAuthor)
|
||||
}
|
||||
|
||||
get size() {
|
||||
return this.candidates.size
|
||||
}
|
||||
|
||||
get agressivelyCleanAuthor() {
|
||||
if (this.cleanAuthor) {
|
||||
const agressivelyCleanAuthor = this.cleanAuthor.replace(/[,/-].*$/, '').trim()
|
||||
return agressivelyCleanAuthor ? agressivelyCleanAuthor : this.cleanAuthor
|
||||
}
|
||||
return ''
|
||||
}
|
||||
|
||||
async getCandidates() {
|
||||
var filteredCandidates = []
|
||||
var promises = []
|
||||
for (const candidate of this.candidates) {
|
||||
promises.push(this.validateAuthor(candidate))
|
||||
}
|
||||
const results = [...new Set(await Promise.all(promises))]
|
||||
filteredCandidates = results.filter(author => author)
|
||||
// If no valid candidates were found, add back an aggresively cleaned author version
|
||||
if (!filteredCandidates.length && this.cleanAuthor) filteredCandidates.push(this.agressivelyCleanAuthor)
|
||||
// Always add an empty author candidate
|
||||
filteredCandidates.push('')
|
||||
Logger.debug(`[${this.constructor.name}] Found ${filteredCandidates.length} fuzzy author candidates`)
|
||||
Logger.debug(filteredCandidates)
|
||||
return filteredCandidates
|
||||
}
|
||||
|
||||
delete(author) {
|
||||
return this.candidates.delete(author)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Search for books including fuzzy searches
|
||||
*
|
||||
|
|
@ -232,62 +358,36 @@ class BookFinder {
|
|||
books = await this.runSearch(title, author, provider, asin, maxTitleDistance, maxAuthorDistance)
|
||||
|
||||
if (!books.length && maxFuzzySearches > 0) {
|
||||
// normalize title and author
|
||||
// Normalize title and author
|
||||
title = title.trim().toLowerCase()
|
||||
author = author.trim().toLowerCase()
|
||||
author = author?.trim().toLowerCase() || ''
|
||||
|
||||
const cleanAuthor = this.cleanAuthorForCompares(author)
|
||||
|
||||
// Now run up to maxFuzzySearches fuzzy searches
|
||||
let candidates = new Set()
|
||||
let cleanedAuthor = this.cleanAuthorForCompares(author)
|
||||
this.addTitleCandidate(title, candidates)
|
||||
let authorCandidates = new BookFinder.AuthorCandidates(this, cleanAuthor)
|
||||
|
||||
// remove parentheses and their contents, and replace with a separator
|
||||
const cleanTitle = title.replace(/\[.*?\]|\(.*?\)|{.*?}/g, " - ")
|
||||
// Remove underscores and parentheses with their contents, and replace with a separator
|
||||
const cleanTitle = title.replace(/\[.*?\]|\(.*?\)|{.*?}|_/g, " - ")
|
||||
// Split title into hypen-separated parts
|
||||
const titleParts = cleanTitle.split(/ - | -|- /)
|
||||
for (const titlePart of titleParts) {
|
||||
this.addTitleCandidate(titlePart, candidates)
|
||||
}
|
||||
// We already searched for original title
|
||||
if (author == cleanedAuthor) candidates.delete(title)
|
||||
if (candidates.size > 0) {
|
||||
candidates = [...candidates]
|
||||
candidates.sort((a, b) => {
|
||||
// Candidates that include the author are likely low quality
|
||||
const includesAuthorDiff = !b.includes(cleanedAuthor) - !a.includes(cleanedAuthor)
|
||||
if (includesAuthorDiff) return includesAuthorDiff
|
||||
// Candidates that include only digits are also likely low quality
|
||||
const onlyDigits = /^\d+$/
|
||||
const includesOnlyDigitsDiff = !onlyDigits.test(b) - !onlyDigits.test(a)
|
||||
if (includesOnlyDigitsDiff) return includesOnlyDigitsDiff
|
||||
// Start with longer candidaets, as they are likely more specific
|
||||
const lengthDiff = b.length - a.length
|
||||
if (lengthDiff) return lengthDiff
|
||||
return b.localeCompare(a)
|
||||
})
|
||||
Logger.debug(`[BookFinder] Found ${candidates.length} fuzzy title candidates`, candidates)
|
||||
for (const candidate of candidates) {
|
||||
for (const titlePart of titleParts)
|
||||
authorCandidates.add(titlePart)
|
||||
authorCandidates = await authorCandidates.getCandidates()
|
||||
for (const authorCandidate of authorCandidates) {
|
||||
let titleCandidates = new BookFinder.TitleCandidates(this, authorCandidate)
|
||||
for (const [position, titlePart] of titleParts.entries())
|
||||
titleCandidates.add(titlePart, position)
|
||||
titleCandidates = titleCandidates.getCandidates()
|
||||
for (const titleCandidate of titleCandidates) {
|
||||
if (titleCandidate == title && authorCandidate == author) continue // We already tried this
|
||||
if (++numFuzzySearches > maxFuzzySearches) return books
|
||||
books = await this.runSearch(candidate, cleanedAuthor, provider, asin, maxTitleDistance, maxAuthorDistance)
|
||||
if (books.length) break
|
||||
}
|
||||
if (!books.length) {
|
||||
// Now try searching without the author
|
||||
for (const candidate of candidates) {
|
||||
if (++numFuzzySearches > maxFuzzySearches) return books
|
||||
books = await this.runSearch(candidate, '', provider, asin, maxTitleDistance, maxAuthorDistance)
|
||||
if (books.length) break
|
||||
}
|
||||
books = await this.runSearch(titleCandidate, authorCandidate, provider, asin, maxTitleDistance, maxAuthorDistance)
|
||||
if (books.length) return books
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (provider === 'openlibrary') {
|
||||
books.sort((a, b) => {
|
||||
return a.totalDistance - b.totalDistance
|
||||
})
|
||||
}
|
||||
|
||||
return books
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -1,9 +0,0 @@
|
|||
(The MIT License)
|
||||
|
||||
Copyright (c) 2012 TJ Holowaychuk <tj@vision-media.ca>
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the 'Software'), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
|
@ -1,2 +0,0 @@
|
|||
exports.parse = require('./parse');
|
||||
exports.stringify = require('./stringify');
|
||||
|
|
@ -1,603 +0,0 @@
|
|||
// http://www.w3.org/TR/CSS21/grammar.html
|
||||
// https://github.com/visionmedia/css-parse/pull/49#issuecomment-30088027
|
||||
var commentre = /\/\*[^*]*\*+([^/*][^*]*\*+)*\//g
|
||||
|
||||
module.exports = function(css, options){
|
||||
options = options || {};
|
||||
|
||||
/**
|
||||
* Positional.
|
||||
*/
|
||||
|
||||
var lineno = 1;
|
||||
var column = 1;
|
||||
|
||||
/**
|
||||
* Update lineno and column based on `str`.
|
||||
*/
|
||||
|
||||
function updatePosition(str) {
|
||||
var lines = str.match(/\n/g);
|
||||
if (lines) lineno += lines.length;
|
||||
var i = str.lastIndexOf('\n');
|
||||
column = ~i ? str.length - i : column + str.length;
|
||||
}
|
||||
|
||||
/**
|
||||
* Mark position and patch `node.position`.
|
||||
*/
|
||||
|
||||
function position() {
|
||||
var start = { line: lineno, column: column };
|
||||
return function(node){
|
||||
node.position = new Position(start);
|
||||
whitespace();
|
||||
return node;
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Store position information for a node
|
||||
*/
|
||||
|
||||
function Position(start) {
|
||||
this.start = start;
|
||||
this.end = { line: lineno, column: column };
|
||||
this.source = options.source;
|
||||
}
|
||||
|
||||
/**
|
||||
* Non-enumerable source string
|
||||
*/
|
||||
|
||||
Position.prototype.content = css;
|
||||
|
||||
/**
|
||||
* Error `msg`.
|
||||
*/
|
||||
|
||||
var errorsList = [];
|
||||
|
||||
function error(msg) {
|
||||
var err = new Error(options.source + ':' + lineno + ':' + column + ': ' + msg);
|
||||
err.reason = msg;
|
||||
err.filename = options.source;
|
||||
err.line = lineno;
|
||||
err.column = column;
|
||||
err.source = css;
|
||||
|
||||
if (options.silent) {
|
||||
errorsList.push(err);
|
||||
} else {
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse stylesheet.
|
||||
*/
|
||||
|
||||
function stylesheet() {
|
||||
var rulesList = rules();
|
||||
|
||||
return {
|
||||
type: 'stylesheet',
|
||||
stylesheet: {
|
||||
source: options.source,
|
||||
rules: rulesList,
|
||||
parsingErrors: errorsList
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Opening brace.
|
||||
*/
|
||||
|
||||
function open() {
|
||||
return match(/^{\s*/);
|
||||
}
|
||||
|
||||
/**
|
||||
* Closing brace.
|
||||
*/
|
||||
|
||||
function close() {
|
||||
return match(/^}/);
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse ruleset.
|
||||
*/
|
||||
|
||||
function rules() {
|
||||
var node;
|
||||
var rules = [];
|
||||
whitespace();
|
||||
comments(rules);
|
||||
while (css.length && css.charAt(0) != '}' && (node = atrule() || rule())) {
|
||||
if (node !== false) {
|
||||
rules.push(node);
|
||||
comments(rules);
|
||||
}
|
||||
}
|
||||
return rules;
|
||||
}
|
||||
|
||||
/**
|
||||
* Match `re` and return captures.
|
||||
*/
|
||||
|
||||
function match(re) {
|
||||
var m = re.exec(css);
|
||||
if (!m) return;
|
||||
var str = m[0];
|
||||
updatePosition(str);
|
||||
css = css.slice(str.length);
|
||||
return m;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse whitespace.
|
||||
*/
|
||||
|
||||
function whitespace() {
|
||||
match(/^\s*/);
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse comments;
|
||||
*/
|
||||
|
||||
function comments(rules) {
|
||||
var c;
|
||||
rules = rules || [];
|
||||
while (c = comment()) {
|
||||
if (c !== false) {
|
||||
rules.push(c);
|
||||
}
|
||||
}
|
||||
return rules;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse comment.
|
||||
*/
|
||||
|
||||
function comment() {
|
||||
var pos = position();
|
||||
if ('/' != css.charAt(0) || '*' != css.charAt(1)) return;
|
||||
|
||||
var i = 2;
|
||||
while ("" != css.charAt(i) && ('*' != css.charAt(i) || '/' != css.charAt(i + 1))) ++i;
|
||||
i += 2;
|
||||
|
||||
if ("" === css.charAt(i-1)) {
|
||||
return error('End of comment missing');
|
||||
}
|
||||
|
||||
var str = css.slice(2, i - 2);
|
||||
column += 2;
|
||||
updatePosition(str);
|
||||
css = css.slice(i);
|
||||
column += 2;
|
||||
|
||||
return pos({
|
||||
type: 'comment',
|
||||
comment: str
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse selector.
|
||||
*/
|
||||
|
||||
function selector() {
|
||||
var m = match(/^([^{]+)/);
|
||||
if (!m) return;
|
||||
/* @fix Remove all comments from selectors
|
||||
* http://ostermiller.org/findcomment.html */
|
||||
return trim(m[0])
|
||||
.replace(/\/\*([^*]|[\r\n]|(\*+([^*/]|[\r\n])))*\*\/+/g, '')
|
||||
.replace(/"(?:\\"|[^"])*"|'(?:\\'|[^'])*'/g, function(m) {
|
||||
return m.replace(/,/g, '\u200C');
|
||||
})
|
||||
.split(/\s*(?![^(]*\)),\s*/)
|
||||
.map(function(s) {
|
||||
return s.replace(/\u200C/g, ',');
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse declaration.
|
||||
*/
|
||||
|
||||
function declaration() {
|
||||
var pos = position();
|
||||
|
||||
// prop
|
||||
var prop = match(/^(\*?[-#\/\*\\\w]+(\[[0-9a-z_-]+\])?)\s*/);
|
||||
if (!prop) return;
|
||||
prop = trim(prop[0]);
|
||||
|
||||
// :
|
||||
if (!match(/^:\s*/)) return error("property missing ':'");
|
||||
|
||||
// val
|
||||
var val = match(/^((?:'(?:\\'|.)*?'|"(?:\\"|.)*?"|\([^\)]*?\)|[^};])+)/);
|
||||
|
||||
var ret = pos({
|
||||
type: 'declaration',
|
||||
property: prop.replace(commentre, ''),
|
||||
value: val ? trim(val[0]).replace(commentre, '') : ''
|
||||
});
|
||||
|
||||
// ;
|
||||
match(/^[;\s]*/);
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse declarations.
|
||||
*/
|
||||
|
||||
function declarations() {
|
||||
var decls = [];
|
||||
|
||||
if (!open()) return error("missing '{'");
|
||||
comments(decls);
|
||||
|
||||
// declarations
|
||||
var decl;
|
||||
while (decl = declaration()) {
|
||||
if (decl !== false) {
|
||||
decls.push(decl);
|
||||
comments(decls);
|
||||
}
|
||||
}
|
||||
|
||||
if (!close()) return error("missing '}'");
|
||||
return decls;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse keyframe.
|
||||
*/
|
||||
|
||||
function keyframe() {
|
||||
var m;
|
||||
var vals = [];
|
||||
var pos = position();
|
||||
|
||||
while (m = match(/^((\d+\.\d+|\.\d+|\d+)%?|[a-z]+)\s*/)) {
|
||||
vals.push(m[1]);
|
||||
match(/^,\s*/);
|
||||
}
|
||||
|
||||
if (!vals.length) return;
|
||||
|
||||
return pos({
|
||||
type: 'keyframe',
|
||||
values: vals,
|
||||
declarations: declarations()
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse keyframes.
|
||||
*/
|
||||
|
||||
function atkeyframes() {
|
||||
var pos = position();
|
||||
var m = match(/^@([-\w]+)?keyframes\s*/);
|
||||
|
||||
if (!m) return;
|
||||
var vendor = m[1];
|
||||
|
||||
// identifier
|
||||
var m = match(/^([-\w]+)\s*/);
|
||||
if (!m) return error("@keyframes missing name");
|
||||
var name = m[1];
|
||||
|
||||
if (!open()) return error("@keyframes missing '{'");
|
||||
|
||||
var frame;
|
||||
var frames = comments();
|
||||
while (frame = keyframe()) {
|
||||
frames.push(frame);
|
||||
frames = frames.concat(comments());
|
||||
}
|
||||
|
||||
if (!close()) return error("@keyframes missing '}'");
|
||||
|
||||
return pos({
|
||||
type: 'keyframes',
|
||||
name: name,
|
||||
vendor: vendor,
|
||||
keyframes: frames
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse supports.
|
||||
*/
|
||||
|
||||
function atsupports() {
|
||||
var pos = position();
|
||||
var m = match(/^@supports *([^{]+)/);
|
||||
|
||||
if (!m) return;
|
||||
var supports = trim(m[1]);
|
||||
|
||||
if (!open()) return error("@supports missing '{'");
|
||||
|
||||
var style = comments().concat(rules());
|
||||
|
||||
if (!close()) return error("@supports missing '}'");
|
||||
|
||||
return pos({
|
||||
type: 'supports',
|
||||
supports: supports,
|
||||
rules: style
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse host.
|
||||
*/
|
||||
|
||||
function athost() {
|
||||
var pos = position();
|
||||
var m = match(/^@host\s*/);
|
||||
|
||||
if (!m) return;
|
||||
|
||||
if (!open()) return error("@host missing '{'");
|
||||
|
||||
var style = comments().concat(rules());
|
||||
|
||||
if (!close()) return error("@host missing '}'");
|
||||
|
||||
return pos({
|
||||
type: 'host',
|
||||
rules: style
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse media.
|
||||
*/
|
||||
|
||||
function atmedia() {
|
||||
var pos = position();
|
||||
var m = match(/^@media *([^{]+)/);
|
||||
|
||||
if (!m) return;
|
||||
var media = trim(m[1]);
|
||||
|
||||
if (!open()) return error("@media missing '{'");
|
||||
|
||||
var style = comments().concat(rules());
|
||||
|
||||
if (!close()) return error("@media missing '}'");
|
||||
|
||||
return pos({
|
||||
type: 'media',
|
||||
media: media,
|
||||
rules: style
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Parse custom-media.
|
||||
*/
|
||||
|
||||
function atcustommedia() {
|
||||
var pos = position();
|
||||
var m = match(/^@custom-media\s+(--[^\s]+)\s*([^{;]+);/);
|
||||
if (!m) return;
|
||||
|
||||
return pos({
|
||||
type: 'custom-media',
|
||||
name: trim(m[1]),
|
||||
media: trim(m[2])
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse paged media.
|
||||
*/
|
||||
|
||||
function atpage() {
|
||||
var pos = position();
|
||||
var m = match(/^@page */);
|
||||
if (!m) return;
|
||||
|
||||
var sel = selector() || [];
|
||||
|
||||
if (!open()) return error("@page missing '{'");
|
||||
var decls = comments();
|
||||
|
||||
// declarations
|
||||
var decl;
|
||||
while (decl = declaration()) {
|
||||
decls.push(decl);
|
||||
decls = decls.concat(comments());
|
||||
}
|
||||
|
||||
if (!close()) return error("@page missing '}'");
|
||||
|
||||
return pos({
|
||||
type: 'page',
|
||||
selectors: sel,
|
||||
declarations: decls
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse document.
|
||||
*/
|
||||
|
||||
function atdocument() {
|
||||
var pos = position();
|
||||
var m = match(/^@([-\w]+)?document *([^{]+)/);
|
||||
if (!m) return;
|
||||
|
||||
var vendor = trim(m[1]);
|
||||
var doc = trim(m[2]);
|
||||
|
||||
if (!open()) return error("@document missing '{'");
|
||||
|
||||
var style = comments().concat(rules());
|
||||
|
||||
if (!close()) return error("@document missing '}'");
|
||||
|
||||
return pos({
|
||||
type: 'document',
|
||||
document: doc,
|
||||
vendor: vendor,
|
||||
rules: style
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse font-face.
|
||||
*/
|
||||
|
||||
function atfontface() {
|
||||
var pos = position();
|
||||
var m = match(/^@font-face\s*/);
|
||||
if (!m) return;
|
||||
|
||||
if (!open()) return error("@font-face missing '{'");
|
||||
var decls = comments();
|
||||
|
||||
// declarations
|
||||
var decl;
|
||||
while (decl = declaration()) {
|
||||
decls.push(decl);
|
||||
decls = decls.concat(comments());
|
||||
}
|
||||
|
||||
if (!close()) return error("@font-face missing '}'");
|
||||
|
||||
return pos({
|
||||
type: 'font-face',
|
||||
declarations: decls
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse import
|
||||
*/
|
||||
|
||||
var atimport = _compileAtrule('import');
|
||||
|
||||
/**
|
||||
* Parse charset
|
||||
*/
|
||||
|
||||
var atcharset = _compileAtrule('charset');
|
||||
|
||||
/**
|
||||
* Parse namespace
|
||||
*/
|
||||
|
||||
var atnamespace = _compileAtrule('namespace');
|
||||
|
||||
/**
|
||||
* Parse non-block at-rules
|
||||
*/
|
||||
|
||||
|
||||
function _compileAtrule(name) {
|
||||
var re = new RegExp('^@' + name + '\\s*([^;]+);');
|
||||
return function() {
|
||||
var pos = position();
|
||||
var m = match(re);
|
||||
if (!m) return;
|
||||
var ret = { type: name };
|
||||
ret[name] = m[1].trim();
|
||||
return pos(ret);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse at rule.
|
||||
*/
|
||||
|
||||
function atrule() {
|
||||
if (css[0] != '@') return;
|
||||
|
||||
return atkeyframes()
|
||||
|| atmedia()
|
||||
|| atcustommedia()
|
||||
|| atsupports()
|
||||
|| atimport()
|
||||
|| atcharset()
|
||||
|| atnamespace()
|
||||
|| atdocument()
|
||||
|| atpage()
|
||||
|| athost()
|
||||
|| atfontface();
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse rule.
|
||||
*/
|
||||
|
||||
function rule() {
|
||||
var pos = position();
|
||||
var sel = selector();
|
||||
|
||||
if (!sel) return error('selector missing');
|
||||
comments();
|
||||
|
||||
return pos({
|
||||
type: 'rule',
|
||||
selectors: sel,
|
||||
declarations: declarations()
|
||||
});
|
||||
}
|
||||
|
||||
return addParent(stylesheet());
|
||||
};
|
||||
|
||||
/**
|
||||
* Trim `str`.
|
||||
*/
|
||||
|
||||
function trim(str) {
|
||||
return str ? str.replace(/^\s+|\s+$/g, '') : '';
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds non-enumerable parent node reference to each node.
|
||||
*/
|
||||
|
||||
function addParent(obj, parent) {
|
||||
var isNode = obj && typeof obj.type === 'string';
|
||||
var childParent = isNode ? obj : parent;
|
||||
|
||||
for (var k in obj) {
|
||||
var value = obj[k];
|
||||
if (Array.isArray(value)) {
|
||||
value.forEach(function(v) { addParent(v, childParent); });
|
||||
} else if (value && typeof value === 'object') {
|
||||
addParent(value, childParent);
|
||||
}
|
||||
}
|
||||
|
||||
if (isNode) {
|
||||
Object.defineProperty(obj, 'parent', {
|
||||
configurable: true,
|
||||
writable: true,
|
||||
enumerable: false,
|
||||
value: parent || null
|
||||
});
|
||||
}
|
||||
|
||||
return obj;
|
||||
}
|
||||
|
|
@ -1,50 +0,0 @@
|
|||
|
||||
/**
|
||||
* Expose `Compiler`.
|
||||
*/
|
||||
|
||||
module.exports = Compiler;
|
||||
|
||||
/**
|
||||
* Initialize a compiler.
|
||||
*
|
||||
* @param {Type} name
|
||||
* @return {Type}
|
||||
* @api public
|
||||
*/
|
||||
|
||||
function Compiler(opts) {
|
||||
this.options = opts || {};
|
||||
}
|
||||
|
||||
/**
|
||||
* Emit `str`
|
||||
*/
|
||||
|
||||
Compiler.prototype.emit = function(str) {
|
||||
return str;
|
||||
};
|
||||
|
||||
/**
|
||||
* Visit `node`.
|
||||
*/
|
||||
|
||||
Compiler.prototype.visit = function(node){
|
||||
return this[node.type](node);
|
||||
};
|
||||
|
||||
/**
|
||||
* Map visit over array of `nodes`, optionally using a `delim`
|
||||
*/
|
||||
|
||||
Compiler.prototype.mapVisit = function(nodes, delim){
|
||||
var buf = '';
|
||||
delim = delim || '';
|
||||
|
||||
for (var i = 0, length = nodes.length; i < length; i++) {
|
||||
buf += this.visit(nodes[i]);
|
||||
if (delim && i < length - 1) buf += this.emit(delim);
|
||||
}
|
||||
|
||||
return buf;
|
||||
};
|
||||
|
|
@ -1,199 +0,0 @@
|
|||
|
||||
/**
|
||||
* Module dependencies.
|
||||
*/
|
||||
|
||||
var Base = require('./compiler');
|
||||
var inherits = require('inherits');
|
||||
|
||||
/**
|
||||
* Expose compiler.
|
||||
*/
|
||||
|
||||
module.exports = Compiler;
|
||||
|
||||
/**
|
||||
* Initialize a new `Compiler`.
|
||||
*/
|
||||
|
||||
function Compiler(options) {
|
||||
Base.call(this, options);
|
||||
}
|
||||
|
||||
/**
|
||||
* Inherit from `Base.prototype`.
|
||||
*/
|
||||
|
||||
inherits(Compiler, Base);
|
||||
|
||||
/**
|
||||
* Compile `node`.
|
||||
*/
|
||||
|
||||
Compiler.prototype.compile = function(node){
|
||||
return node.stylesheet
|
||||
.rules.map(this.visit, this)
|
||||
.join('');
|
||||
};
|
||||
|
||||
/**
|
||||
* Visit comment node.
|
||||
*/
|
||||
|
||||
Compiler.prototype.comment = function(node){
|
||||
return this.emit('', node.position);
|
||||
};
|
||||
|
||||
/**
|
||||
* Visit import node.
|
||||
*/
|
||||
|
||||
Compiler.prototype.import = function(node){
|
||||
return this.emit('@import ' + node.import + ';', node.position);
|
||||
};
|
||||
|
||||
/**
|
||||
* Visit media node.
|
||||
*/
|
||||
|
||||
Compiler.prototype.media = function(node){
|
||||
return this.emit('@media ' + node.media, node.position)
|
||||
+ this.emit('{')
|
||||
+ this.mapVisit(node.rules)
|
||||
+ this.emit('}');
|
||||
};
|
||||
|
||||
/**
|
||||
* Visit document node.
|
||||
*/
|
||||
|
||||
Compiler.prototype.document = function(node){
|
||||
var doc = '@' + (node.vendor || '') + 'document ' + node.document;
|
||||
|
||||
return this.emit(doc, node.position)
|
||||
+ this.emit('{')
|
||||
+ this.mapVisit(node.rules)
|
||||
+ this.emit('}');
|
||||
};
|
||||
|
||||
/**
|
||||
* Visit charset node.
|
||||
*/
|
||||
|
||||
Compiler.prototype.charset = function(node){
|
||||
return this.emit('@charset ' + node.charset + ';', node.position);
|
||||
};
|
||||
|
||||
/**
|
||||
* Visit namespace node.
|
||||
*/
|
||||
|
||||
Compiler.prototype.namespace = function(node){
|
||||
return this.emit('@namespace ' + node.namespace + ';', node.position);
|
||||
};
|
||||
|
||||
/**
|
||||
* Visit supports node.
|
||||
*/
|
||||
|
||||
Compiler.prototype.supports = function(node){
|
||||
return this.emit('@supports ' + node.supports, node.position)
|
||||
+ this.emit('{')
|
||||
+ this.mapVisit(node.rules)
|
||||
+ this.emit('}');
|
||||
};
|
||||
|
||||
/**
|
||||
* Visit keyframes node.
|
||||
*/
|
||||
|
||||
Compiler.prototype.keyframes = function(node){
|
||||
return this.emit('@'
|
||||
+ (node.vendor || '')
|
||||
+ 'keyframes '
|
||||
+ node.name, node.position)
|
||||
+ this.emit('{')
|
||||
+ this.mapVisit(node.keyframes)
|
||||
+ this.emit('}');
|
||||
};
|
||||
|
||||
/**
|
||||
* Visit keyframe node.
|
||||
*/
|
||||
|
||||
Compiler.prototype.keyframe = function(node){
|
||||
var decls = node.declarations;
|
||||
|
||||
return this.emit(node.values.join(','), node.position)
|
||||
+ this.emit('{')
|
||||
+ this.mapVisit(decls)
|
||||
+ this.emit('}');
|
||||
};
|
||||
|
||||
/**
|
||||
* Visit page node.
|
||||
*/
|
||||
|
||||
Compiler.prototype.page = function(node){
|
||||
var sel = node.selectors.length
|
||||
? node.selectors.join(', ')
|
||||
: '';
|
||||
|
||||
return this.emit('@page ' + sel, node.position)
|
||||
+ this.emit('{')
|
||||
+ this.mapVisit(node.declarations)
|
||||
+ this.emit('}');
|
||||
};
|
||||
|
||||
/**
|
||||
* Visit font-face node.
|
||||
*/
|
||||
|
||||
Compiler.prototype['font-face'] = function(node){
|
||||
return this.emit('@font-face', node.position)
|
||||
+ this.emit('{')
|
||||
+ this.mapVisit(node.declarations)
|
||||
+ this.emit('}');
|
||||
};
|
||||
|
||||
/**
|
||||
* Visit host node.
|
||||
*/
|
||||
|
||||
Compiler.prototype.host = function(node){
|
||||
return this.emit('@host', node.position)
|
||||
+ this.emit('{')
|
||||
+ this.mapVisit(node.rules)
|
||||
+ this.emit('}');
|
||||
};
|
||||
|
||||
/**
|
||||
* Visit custom-media node.
|
||||
*/
|
||||
|
||||
Compiler.prototype['custom-media'] = function(node){
|
||||
return this.emit('@custom-media ' + node.name + ' ' + node.media + ';', node.position);
|
||||
};
|
||||
|
||||
/**
|
||||
* Visit rule node.
|
||||
*/
|
||||
|
||||
Compiler.prototype.rule = function(node){
|
||||
var decls = node.declarations;
|
||||
if (!decls.length) return '';
|
||||
|
||||
return this.emit(node.selectors.join(','), node.position)
|
||||
+ this.emit('{')
|
||||
+ this.mapVisit(decls)
|
||||
+ this.emit('}');
|
||||
};
|
||||
|
||||
/**
|
||||
* Visit declaration node.
|
||||
*/
|
||||
|
||||
Compiler.prototype.declaration = function(node){
|
||||
return this.emit(node.property + ':' + node.value, node.position) + this.emit(';');
|
||||
};
|
||||
|
||||
|
|
@ -1,254 +0,0 @@
|
|||
|
||||
/**
|
||||
* Module dependencies.
|
||||
*/
|
||||
|
||||
var Base = require('./compiler');
|
||||
var inherits = require('inherits');
|
||||
|
||||
/**
|
||||
* Expose compiler.
|
||||
*/
|
||||
|
||||
module.exports = Compiler;
|
||||
|
||||
/**
|
||||
* Initialize a new `Compiler`.
|
||||
*/
|
||||
|
||||
function Compiler(options) {
|
||||
options = options || {};
|
||||
Base.call(this, options);
|
||||
this.indentation = typeof options.indent === 'string' ? options.indent : ' ';
|
||||
}
|
||||
|
||||
/**
|
||||
* Inherit from `Base.prototype`.
|
||||
*/
|
||||
|
||||
inherits(Compiler, Base);
|
||||
|
||||
/**
|
||||
* Compile `node`.
|
||||
*/
|
||||
|
||||
Compiler.prototype.compile = function(node){
|
||||
return this.stylesheet(node);
|
||||
};
|
||||
|
||||
/**
|
||||
* Visit stylesheet node.
|
||||
*/
|
||||
|
||||
Compiler.prototype.stylesheet = function(node){
|
||||
return this.mapVisit(node.stylesheet.rules, '\n\n');
|
||||
};
|
||||
|
||||
/**
|
||||
* Visit comment node.
|
||||
*/
|
||||
|
||||
Compiler.prototype.comment = function(node){
|
||||
return this.emit(this.indent() + '/*' + node.comment + '*/', node.position);
|
||||
};
|
||||
|
||||
/**
|
||||
* Visit import node.
|
||||
*/
|
||||
|
||||
Compiler.prototype.import = function(node){
|
||||
return this.emit('@import ' + node.import + ';', node.position);
|
||||
};
|
||||
|
||||
/**
|
||||
* Visit media node.
|
||||
*/
|
||||
|
||||
Compiler.prototype.media = function(node){
|
||||
return this.emit('@media ' + node.media, node.position)
|
||||
+ this.emit(
|
||||
' {\n'
|
||||
+ this.indent(1))
|
||||
+ this.mapVisit(node.rules, '\n\n')
|
||||
+ this.emit(
|
||||
this.indent(-1)
|
||||
+ '\n}');
|
||||
};
|
||||
|
||||
/**
|
||||
* Visit document node.
|
||||
*/
|
||||
|
||||
Compiler.prototype.document = function(node){
|
||||
var doc = '@' + (node.vendor || '') + 'document ' + node.document;
|
||||
|
||||
return this.emit(doc, node.position)
|
||||
+ this.emit(
|
||||
' '
|
||||
+ ' {\n'
|
||||
+ this.indent(1))
|
||||
+ this.mapVisit(node.rules, '\n\n')
|
||||
+ this.emit(
|
||||
this.indent(-1)
|
||||
+ '\n}');
|
||||
};
|
||||
|
||||
/**
|
||||
* Visit charset node.
|
||||
*/
|
||||
|
||||
Compiler.prototype.charset = function(node){
|
||||
return this.emit('@charset ' + node.charset + ';', node.position);
|
||||
};
|
||||
|
||||
/**
|
||||
* Visit namespace node.
|
||||
*/
|
||||
|
||||
Compiler.prototype.namespace = function(node){
|
||||
return this.emit('@namespace ' + node.namespace + ';', node.position);
|
||||
};
|
||||
|
||||
/**
|
||||
* Visit supports node.
|
||||
*/
|
||||
|
||||
Compiler.prototype.supports = function(node){
|
||||
return this.emit('@supports ' + node.supports, node.position)
|
||||
+ this.emit(
|
||||
' {\n'
|
||||
+ this.indent(1))
|
||||
+ this.mapVisit(node.rules, '\n\n')
|
||||
+ this.emit(
|
||||
this.indent(-1)
|
||||
+ '\n}');
|
||||
};
|
||||
|
||||
/**
|
||||
* Visit keyframes node.
|
||||
*/
|
||||
|
||||
Compiler.prototype.keyframes = function(node){
|
||||
return this.emit('@' + (node.vendor || '') + 'keyframes ' + node.name, node.position)
|
||||
+ this.emit(
|
||||
' {\n'
|
||||
+ this.indent(1))
|
||||
+ this.mapVisit(node.keyframes, '\n')
|
||||
+ this.emit(
|
||||
this.indent(-1)
|
||||
+ '}');
|
||||
};
|
||||
|
||||
/**
|
||||
* Visit keyframe node.
|
||||
*/
|
||||
|
||||
Compiler.prototype.keyframe = function(node){
|
||||
var decls = node.declarations;
|
||||
|
||||
return this.emit(this.indent())
|
||||
+ this.emit(node.values.join(', '), node.position)
|
||||
+ this.emit(
|
||||
' {\n'
|
||||
+ this.indent(1))
|
||||
+ this.mapVisit(decls, '\n')
|
||||
+ this.emit(
|
||||
this.indent(-1)
|
||||
+ '\n'
|
||||
+ this.indent() + '}\n');
|
||||
};
|
||||
|
||||
/**
|
||||
* Visit page node.
|
||||
*/
|
||||
|
||||
Compiler.prototype.page = function(node){
|
||||
var sel = node.selectors.length
|
||||
? node.selectors.join(', ') + ' '
|
||||
: '';
|
||||
|
||||
return this.emit('@page ' + sel, node.position)
|
||||
+ this.emit('{\n')
|
||||
+ this.emit(this.indent(1))
|
||||
+ this.mapVisit(node.declarations, '\n')
|
||||
+ this.emit(this.indent(-1))
|
||||
+ this.emit('\n}');
|
||||
};
|
||||
|
||||
/**
|
||||
* Visit font-face node.
|
||||
*/
|
||||
|
||||
Compiler.prototype['font-face'] = function(node){
|
||||
return this.emit('@font-face ', node.position)
|
||||
+ this.emit('{\n')
|
||||
+ this.emit(this.indent(1))
|
||||
+ this.mapVisit(node.declarations, '\n')
|
||||
+ this.emit(this.indent(-1))
|
||||
+ this.emit('\n}');
|
||||
};
|
||||
|
||||
/**
|
||||
* Visit host node.
|
||||
*/
|
||||
|
||||
Compiler.prototype.host = function(node){
|
||||
return this.emit('@host', node.position)
|
||||
+ this.emit(
|
||||
' {\n'
|
||||
+ this.indent(1))
|
||||
+ this.mapVisit(node.rules, '\n\n')
|
||||
+ this.emit(
|
||||
this.indent(-1)
|
||||
+ '\n}');
|
||||
};
|
||||
|
||||
/**
|
||||
* Visit custom-media node.
|
||||
*/
|
||||
|
||||
Compiler.prototype['custom-media'] = function(node){
|
||||
return this.emit('@custom-media ' + node.name + ' ' + node.media + ';', node.position);
|
||||
};
|
||||
|
||||
/**
|
||||
* Visit rule node.
|
||||
*/
|
||||
|
||||
Compiler.prototype.rule = function(node){
|
||||
var indent = this.indent();
|
||||
var decls = node.declarations;
|
||||
if (!decls.length) return '';
|
||||
|
||||
return this.emit(node.selectors.map(function(s){ return indent + s }).join(',\n'), node.position)
|
||||
+ this.emit(' {\n')
|
||||
+ this.emit(this.indent(1))
|
||||
+ this.mapVisit(decls, '\n')
|
||||
+ this.emit(this.indent(-1))
|
||||
+ this.emit('\n' + this.indent() + '}');
|
||||
};
|
||||
|
||||
/**
|
||||
* Visit declaration node.
|
||||
*/
|
||||
|
||||
Compiler.prototype.declaration = function(node){
|
||||
return this.emit(this.indent())
|
||||
+ this.emit(node.property + ': ' + node.value, node.position)
|
||||
+ this.emit(';');
|
||||
};
|
||||
|
||||
/**
|
||||
* Increase, decrease or return current indentation.
|
||||
*/
|
||||
|
||||
Compiler.prototype.indent = function(level) {
|
||||
this.level = this.level || 1;
|
||||
|
||||
if (null != level) {
|
||||
this.level += level;
|
||||
return '';
|
||||
}
|
||||
|
||||
return Array(this.level).join(this.indentation);
|
||||
};
|
||||
|
|
@ -1,47 +0,0 @@
|
|||
|
||||
/**
|
||||
* Module dependencies.
|
||||
*/
|
||||
|
||||
var Compressed = require('./compress');
|
||||
var Identity = require('./identity');
|
||||
|
||||
/**
|
||||
* Stringfy the given AST `node`.
|
||||
*
|
||||
* Options:
|
||||
*
|
||||
* - `compress` space-optimized output
|
||||
* - `sourcemap` return an object with `.code` and `.map`
|
||||
*
|
||||
* @param {Object} node
|
||||
* @param {Object} [options]
|
||||
* @return {String}
|
||||
* @api public
|
||||
*/
|
||||
|
||||
module.exports = function(node, options){
|
||||
options = options || {};
|
||||
|
||||
var compiler = options.compress
|
||||
? new Compressed(options)
|
||||
: new Identity(options);
|
||||
|
||||
// source maps
|
||||
if (options.sourcemap) {
|
||||
var sourcemaps = require('./source-map-support');
|
||||
sourcemaps(compiler);
|
||||
|
||||
var code = compiler.compile(node);
|
||||
compiler.applySourceMaps();
|
||||
|
||||
var map = options.sourcemap === 'generator'
|
||||
? compiler.map
|
||||
: compiler.map.toJSON();
|
||||
|
||||
return { code: code, map: map };
|
||||
}
|
||||
|
||||
var code = compiler.compile(node);
|
||||
return code;
|
||||
};
|
||||
|
|
@ -1,133 +0,0 @@
|
|||
|
||||
/**
|
||||
* Module dependencies.
|
||||
*/
|
||||
|
||||
var SourceMap = require('source-map').SourceMapGenerator;
|
||||
var SourceMapConsumer = require('source-map').SourceMapConsumer;
|
||||
var sourceMapResolve = require('source-map-resolve');
|
||||
var fs = require('fs');
|
||||
var path = require('path');
|
||||
|
||||
/**
|
||||
* Expose `mixin()`.
|
||||
*/
|
||||
|
||||
module.exports = mixin;
|
||||
|
||||
/**
|
||||
* Ensure Windows-style paths are formatted properly
|
||||
*/
|
||||
|
||||
const makeFriendlyPath = function(aPath) {
|
||||
return path.sep === "\\" ? aPath.replace(/\\/g, "/").replace(/^[a-z]:\/?/i, "/") : aPath;
|
||||
}
|
||||
|
||||
/**
|
||||
* Mixin source map support into `compiler`.
|
||||
*
|
||||
* @param {Compiler} compiler
|
||||
* @api public
|
||||
*/
|
||||
|
||||
function mixin(compiler) {
|
||||
compiler._comment = compiler.comment;
|
||||
compiler.map = new SourceMap();
|
||||
compiler.position = { line: 1, column: 1 };
|
||||
compiler.files = {};
|
||||
for (var k in exports) compiler[k] = exports[k];
|
||||
}
|
||||
|
||||
/**
|
||||
* Update position.
|
||||
*
|
||||
* @param {String} str
|
||||
* @api private
|
||||
*/
|
||||
|
||||
exports.updatePosition = function(str) {
|
||||
var lines = str.match(/\n/g);
|
||||
if (lines) this.position.line += lines.length;
|
||||
var i = str.lastIndexOf('\n');
|
||||
this.position.column = ~i ? str.length - i : this.position.column + str.length;
|
||||
};
|
||||
|
||||
/**
|
||||
* Emit `str`.
|
||||
*
|
||||
* @param {String} str
|
||||
* @param {Object} [pos]
|
||||
* @return {String}
|
||||
* @api private
|
||||
*/
|
||||
|
||||
exports.emit = function(str, pos) {
|
||||
if (pos) {
|
||||
var sourceFile = makeFriendlyPath(pos.source || 'source.css');
|
||||
|
||||
this.map.addMapping({
|
||||
source: sourceFile,
|
||||
generated: {
|
||||
line: this.position.line,
|
||||
column: Math.max(this.position.column - 1, 0)
|
||||
},
|
||||
original: {
|
||||
line: pos.start.line,
|
||||
column: pos.start.column - 1
|
||||
}
|
||||
});
|
||||
|
||||
this.addFile(sourceFile, pos);
|
||||
}
|
||||
|
||||
this.updatePosition(str);
|
||||
|
||||
return str;
|
||||
};
|
||||
|
||||
/**
|
||||
* Adds a file to the source map output if it has not already been added
|
||||
* @param {String} file
|
||||
* @param {Object} pos
|
||||
*/
|
||||
|
||||
exports.addFile = function(file, pos) {
|
||||
if (typeof pos.content !== 'string') return;
|
||||
if (Object.prototype.hasOwnProperty.call(this.files, file)) return;
|
||||
|
||||
this.files[file] = pos.content;
|
||||
};
|
||||
|
||||
/**
|
||||
* Applies any original source maps to the output and embeds the source file
|
||||
* contents in the source map.
|
||||
*/
|
||||
|
||||
exports.applySourceMaps = function() {
|
||||
Object.keys(this.files).forEach(function(file) {
|
||||
var content = this.files[file];
|
||||
this.map.setSourceContent(file, content);
|
||||
|
||||
if (this.options.inputSourcemaps !== false) {
|
||||
var originalMap = sourceMapResolve.resolveSync(
|
||||
content, file, fs.readFileSync);
|
||||
if (originalMap) {
|
||||
var map = new SourceMapConsumer(originalMap.map);
|
||||
var relativeTo = originalMap.sourcesRelativeTo;
|
||||
this.map.applySourceMap(map, file, makeFriendlyPath(path.dirname(relativeTo)));
|
||||
}
|
||||
}
|
||||
}, this);
|
||||
};
|
||||
|
||||
/**
|
||||
* Process comments, drops sourceMap comments.
|
||||
* @param {Object} node
|
||||
*/
|
||||
|
||||
exports.comment = function(node) {
|
||||
if (/^# sourceMappingURL=/.test(node.comment))
|
||||
return this.emit('', node.position);
|
||||
else
|
||||
return this._comment(node);
|
||||
};
|
||||
|
|
@ -4,14 +4,13 @@ const fs = require('../libs/fsExtra')
|
|||
|
||||
const workerThreads = require('worker_threads')
|
||||
const Logger = require('../Logger')
|
||||
const TaskManager = require('./TaskManager')
|
||||
const Task = require('../objects/Task')
|
||||
const { writeConcatFile } = require('../utils/ffmpegHelpers')
|
||||
const toneHelpers = require('../utils/toneHelpers')
|
||||
|
||||
class AbMergeManager {
|
||||
constructor(taskManager) {
|
||||
this.taskManager = taskManager
|
||||
|
||||
constructor() {
|
||||
this.itemsCacheDir = Path.join(global.MetadataPath, 'cache/items')
|
||||
|
||||
this.pendingTasks = []
|
||||
|
|
@ -45,7 +44,7 @@ class AbMergeManager {
|
|||
}
|
||||
const taskDescription = `Encoding audiobook "${libraryItem.media.metadata.title}" into a single m4b file.`
|
||||
task.setData('encode-m4b', 'Encoding M4b', taskDescription, false, taskData)
|
||||
this.taskManager.addTask(task)
|
||||
TaskManager.addTask(task)
|
||||
Logger.info(`Start m4b encode for ${libraryItem.id} - TaskId: ${task.id}`)
|
||||
|
||||
if (!await fs.pathExists(taskData.itemCachePath)) {
|
||||
|
|
@ -234,7 +233,7 @@ class AbMergeManager {
|
|||
}
|
||||
}
|
||||
|
||||
this.taskManager.taskFinished(task)
|
||||
TaskManager.taskFinished(task)
|
||||
}
|
||||
}
|
||||
module.exports = AbMergeManager
|
||||
|
|
|
|||
|
|
@ -7,12 +7,12 @@ const fs = require('../libs/fsExtra')
|
|||
|
||||
const toneHelpers = require('../utils/toneHelpers')
|
||||
|
||||
const TaskManager = require('./TaskManager')
|
||||
|
||||
const Task = require('../objects/Task')
|
||||
|
||||
class AudioMetadataMangaer {
|
||||
constructor(taskManager) {
|
||||
this.taskManager = taskManager
|
||||
|
||||
constructor() {
|
||||
this.itemsCacheDir = Path.join(global.MetadataPath, 'cache/items')
|
||||
|
||||
this.MAX_CONCURRENT_TASKS = 1
|
||||
|
|
@ -101,7 +101,7 @@ class AudioMetadataMangaer {
|
|||
|
||||
async runMetadataEmbed(task) {
|
||||
this.tasksRunning.push(task)
|
||||
this.taskManager.addTask(task)
|
||||
TaskManager.addTask(task)
|
||||
|
||||
Logger.info(`[AudioMetadataManager] Starting metadata embed task`, task.description)
|
||||
|
||||
|
|
@ -176,7 +176,7 @@ class AudioMetadataMangaer {
|
|||
}
|
||||
|
||||
handleTaskFinished(task) {
|
||||
this.taskManager.taskFinished(task)
|
||||
TaskManager.taskFinished(task)
|
||||
this.tasksRunning = this.tasksRunning.filter(t => t.id !== task.id)
|
||||
|
||||
if (this.tasksRunning.length < this.MAX_CONCURRENT_TASKS && this.tasksQueued.length) {
|
||||
|
|
|
|||
|
|
@ -5,7 +5,7 @@ const readChunk = require('../libs/readChunk')
|
|||
const imageType = require('../libs/imageType')
|
||||
|
||||
const globals = require('../utils/globals')
|
||||
const { downloadFile, filePathToPOSIX, checkPathIsFile } = require('../utils/fileUtils')
|
||||
const { downloadImageFile, filePathToPOSIX, checkPathIsFile } = require('../utils/fileUtils')
|
||||
const { extractCoverArt } = require('../utils/ffmpegHelpers')
|
||||
const CacheManager = require('../managers/CacheManager')
|
||||
|
||||
|
|
@ -120,13 +120,16 @@ class CoverManager {
|
|||
await fs.ensureDir(coverDirPath)
|
||||
|
||||
var temppath = Path.posix.join(coverDirPath, 'cover')
|
||||
var success = await downloadFile(url, temppath).then(() => true).catch((err) => {
|
||||
Logger.error(`[CoverManager] Download image file failed for "${url}"`, err)
|
||||
|
||||
let errorMsg = ''
|
||||
let success = await downloadImageFile(url, temppath).then(() => true).catch((err) => {
|
||||
errorMsg = err.message || 'Unknown error'
|
||||
Logger.error(`[CoverManager] Download image file failed for "${url}"`, errorMsg)
|
||||
return false
|
||||
})
|
||||
if (!success) {
|
||||
return {
|
||||
error: 'Failed to download image from url'
|
||||
error: 'Failed to download image from url: ' + errorMsg
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -284,7 +287,7 @@ class CoverManager {
|
|||
await fs.ensureDir(coverDirPath)
|
||||
|
||||
const temppath = Path.posix.join(coverDirPath, 'cover')
|
||||
const success = await downloadFile(url, temppath).then(() => true).catch((err) => {
|
||||
const success = await downloadImageFile(url, temppath).then(() => true).catch((err) => {
|
||||
Logger.error(`[CoverManager] Download image file failed for "${url}"`, err)
|
||||
return false
|
||||
})
|
||||
|
|
|
|||
|
|
@ -330,14 +330,15 @@ class PlaybackSessionManager {
|
|||
Logger.debug(`[PlaybackSessionManager] Removed session "${sessionId}"`)
|
||||
}
|
||||
|
||||
// Check for streams that are not in memory and remove
|
||||
/**
|
||||
* Remove all stream folders in `/metadata/streams`
|
||||
*/
|
||||
async removeOrphanStreams() {
|
||||
await fs.ensureDir(this.StreamsPath)
|
||||
try {
|
||||
const streamsInPath = await fs.readdir(this.StreamsPath)
|
||||
for (let i = 0; i < streamsInPath.length; i++) {
|
||||
const streamId = streamsInPath[i]
|
||||
if (streamId.startsWith('play_')) { // Make sure to only remove folders that are a stream
|
||||
for (const streamId of streamsInPath) {
|
||||
if (/[0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}/.test(streamId)) { // Ensure is uuidv4
|
||||
const session = this.sessions.find(se => se.id === streamId)
|
||||
if (!session) {
|
||||
const streamPath = Path.join(this.StreamsPath, streamId)
|
||||
|
|
|
|||
|
|
@ -12,17 +12,17 @@ const opmlGenerator = require('../utils/generators/opmlGenerator')
|
|||
const prober = require('../utils/prober')
|
||||
const ffmpegHelpers = require('../utils/ffmpegHelpers')
|
||||
|
||||
const TaskManager = require('./TaskManager')
|
||||
|
||||
const LibraryFile = require('../objects/files/LibraryFile')
|
||||
const PodcastEpisodeDownload = require('../objects/PodcastEpisodeDownload')
|
||||
const PodcastEpisode = require('../objects/entities/PodcastEpisode')
|
||||
const AudioFile = require('../objects/files/AudioFile')
|
||||
const Task = require("../objects/Task")
|
||||
|
||||
class PodcastManager {
|
||||
constructor(watcher, notificationManager, taskManager) {
|
||||
constructor(watcher, notificationManager) {
|
||||
this.watcher = watcher
|
||||
this.notificationManager = notificationManager
|
||||
this.taskManager = taskManager
|
||||
|
||||
this.downloadQueue = []
|
||||
this.currentDownload = null
|
||||
|
|
@ -69,14 +69,12 @@ class PodcastManager {
|
|||
return
|
||||
}
|
||||
|
||||
const task = new Task()
|
||||
const taskDescription = `Downloading episode "${podcastEpisodeDownload.podcastEpisode.title}".`
|
||||
const taskData = {
|
||||
libraryId: podcastEpisodeDownload.libraryId,
|
||||
libraryItemId: podcastEpisodeDownload.libraryItemId,
|
||||
}
|
||||
task.setData('download-podcast-episode', 'Downloading Episode', taskDescription, false, taskData)
|
||||
this.taskManager.addTask(task)
|
||||
const task = TaskManager.createAndAddTask('download-podcast-episode', 'Downloading Episode', taskDescription, false, taskData)
|
||||
|
||||
SocketAuthority.emitter('episode_download_started', podcastEpisodeDownload.toJSONForClient())
|
||||
this.currentDownload = podcastEpisodeDownload
|
||||
|
|
@ -128,7 +126,7 @@ class PodcastManager {
|
|||
this.currentDownload.setFinished(false)
|
||||
}
|
||||
|
||||
this.taskManager.taskFinished(task)
|
||||
TaskManager.taskFinished(task)
|
||||
|
||||
SocketAuthority.emitter('episode_download_finished', this.currentDownload.toJSONForClient())
|
||||
SocketAuthority.emitter('episode_download_queue_updated', this.getDownloadQueueDetails())
|
||||
|
|
@ -201,7 +199,7 @@ class PodcastManager {
|
|||
})
|
||||
// TODO: Should we check for open playback sessions for this episode?
|
||||
// TODO: remove all user progress for this episode
|
||||
if (oldestEpisode && oldestEpisode.audioFile) {
|
||||
if (oldestEpisode?.audioFile) {
|
||||
Logger.info(`[PodcastManager] Deleting oldest episode "${oldestEpisode.title}"`)
|
||||
const successfullyDeleted = await removeFile(oldestEpisode.audioFile.metadata.path)
|
||||
if (successfullyDeleted) {
|
||||
|
|
@ -246,7 +244,7 @@ class PodcastManager {
|
|||
Logger.debug(`[PodcastManager] runEpisodeCheck: "${libraryItem.media.metadata.title}" checking for episodes after ${new Date(dateToCheckForEpisodesAfter)}`)
|
||||
|
||||
var newEpisodes = await this.checkPodcastForNewEpisodes(libraryItem, dateToCheckForEpisodesAfter, libraryItem.media.maxNewEpisodesToDownload)
|
||||
Logger.debug(`[PodcastManager] runEpisodeCheck: ${newEpisodes ? newEpisodes.length : 'N/A'} episodes found`)
|
||||
Logger.debug(`[PodcastManager] runEpisodeCheck: ${newEpisodes?.length || 'N/A'} episodes found`)
|
||||
|
||||
if (!newEpisodes) { // Failed
|
||||
// Allow up to MaxFailedEpisodeChecks failed attempts before disabling auto download
|
||||
|
|
@ -280,14 +278,14 @@ class PodcastManager {
|
|||
Logger.error(`[PodcastManager] checkPodcastForNewEpisodes no feed url for ${podcastLibraryItem.media.metadata.title} (ID: ${podcastLibraryItem.id})`)
|
||||
return false
|
||||
}
|
||||
var feed = await getPodcastFeed(podcastLibraryItem.media.metadata.feedUrl)
|
||||
if (!feed || !feed.episodes) {
|
||||
const feed = await getPodcastFeed(podcastLibraryItem.media.metadata.feedUrl)
|
||||
if (!feed?.episodes) {
|
||||
Logger.error(`[PodcastManager] checkPodcastForNewEpisodes invalid feed payload for ${podcastLibraryItem.media.metadata.title} (ID: ${podcastLibraryItem.id})`, feed)
|
||||
return false
|
||||
}
|
||||
|
||||
// Filter new and not already has
|
||||
var newEpisodes = feed.episodes.filter(ep => ep.publishedAt > dateToCheckForEpisodesAfter && !podcastLibraryItem.media.checkHasEpisodeByFeedUrl(ep.enclosure.url))
|
||||
let newEpisodes = feed.episodes.filter(ep => ep.publishedAt > dateToCheckForEpisodesAfter && !podcastLibraryItem.media.checkHasEpisodeByFeedUrl(ep.enclosure.url))
|
||||
|
||||
if (maxNewEpisodes > 0) {
|
||||
newEpisodes = newEpisodes.slice(0, maxNewEpisodes)
|
||||
|
|
|
|||
|
|
@ -1,20 +1,48 @@
|
|||
const SocketAuthority = require('../SocketAuthority')
|
||||
const Task = require('../objects/Task')
|
||||
|
||||
class TaskManager {
|
||||
constructor() {
|
||||
/** @type {Task[]} */
|
||||
this.tasks = []
|
||||
}
|
||||
|
||||
/**
|
||||
* Add task and emit socket task_started event
|
||||
*
|
||||
* @param {Task} task
|
||||
*/
|
||||
addTask(task) {
|
||||
this.tasks.push(task)
|
||||
SocketAuthority.emitter('task_started', task.toJSON())
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove task and emit task_finished event
|
||||
*
|
||||
* @param {Task} task
|
||||
*/
|
||||
taskFinished(task) {
|
||||
if (this.tasks.some(t => t.id === task.id)) {
|
||||
this.tasks = this.tasks.filter(t => t.id !== task.id)
|
||||
SocketAuthority.emitter('task_finished', task.toJSON())
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create new task and add
|
||||
*
|
||||
* @param {string} action
|
||||
* @param {string} title
|
||||
* @param {string} description
|
||||
* @param {boolean} showSuccess
|
||||
* @param {Object} [data]
|
||||
*/
|
||||
createAndAddTask(action, title, description, showSuccess, data = {}) {
|
||||
const task = new Task()
|
||||
task.setData(action, title, description, showSuccess, data)
|
||||
this.addTask(task)
|
||||
return task
|
||||
}
|
||||
}
|
||||
module.exports = TaskManager
|
||||
module.exports = new TaskManager()
|
||||
|
|
@ -211,6 +211,32 @@ class Book extends Model {
|
|||
}
|
||||
}
|
||||
|
||||
getAbsMetadataJson() {
|
||||
return {
|
||||
tags: this.tags || [],
|
||||
chapters: this.chapters?.map(c => ({ ...c })) || [],
|
||||
title: this.title,
|
||||
subtitle: this.subtitle,
|
||||
authors: this.authors.map(a => a.name),
|
||||
narrators: this.narrators,
|
||||
series: this.series.map(se => {
|
||||
const sequence = se.bookSeries?.sequence || ''
|
||||
if (!sequence) return se.name
|
||||
return `${se.name} #${sequence}`
|
||||
}),
|
||||
genres: this.genres || [],
|
||||
publishedYear: this.publishedYear,
|
||||
publishedDate: this.publishedDate,
|
||||
publisher: this.publisher,
|
||||
description: this.description,
|
||||
isbn: this.isbn,
|
||||
asin: this.asin,
|
||||
language: this.language,
|
||||
explicit: !!this.explicit,
|
||||
abridged: !!this.abridged
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Initialize model
|
||||
* @param {import('../Database').sequelize} sequelize
|
||||
|
|
|
|||
|
|
@ -11,6 +11,7 @@ const oldLibrary = require('../objects/Library')
|
|||
* @property {string} autoScanCronExpression
|
||||
* @property {boolean} audiobooksOnly
|
||||
* @property {boolean} hideSingleBookSeries Do not show series that only have 1 book
|
||||
* @property {string[]} metadataPrecedence
|
||||
*/
|
||||
|
||||
class Library extends Model {
|
||||
|
|
@ -79,6 +80,9 @@ class Library extends Model {
|
|||
mediaType: libraryExpanded.mediaType,
|
||||
provider: libraryExpanded.provider,
|
||||
settings: libraryExpanded.settings,
|
||||
lastScan: libraryExpanded.lastScan?.valueOf() || null,
|
||||
lastScanVersion: libraryExpanded.lastScanVersion || null,
|
||||
lastScanMetadataPrecedence: libraryExpanded.extraData?.lastScanMetadataPrecedence || null,
|
||||
createdAt: libraryExpanded.createdAt.valueOf(),
|
||||
lastUpdate: libraryExpanded.updatedAt.valueOf()
|
||||
})
|
||||
|
|
@ -151,6 +155,9 @@ class Library extends Model {
|
|||
if (oldLibrary.oldLibraryId) {
|
||||
extraData.oldLibraryId = oldLibrary.oldLibraryId
|
||||
}
|
||||
if (oldLibrary.lastScanMetadataPrecedence) {
|
||||
extraData.lastScanMetadataPrecedence = oldLibrary.lastScanMetadataPrecedence
|
||||
}
|
||||
return {
|
||||
id: oldLibrary.id,
|
||||
name: oldLibrary.name,
|
||||
|
|
@ -159,6 +166,8 @@ class Library extends Model {
|
|||
mediaType: oldLibrary.mediaType || null,
|
||||
provider: oldLibrary.provider,
|
||||
settings: oldLibrary.settings?.toJSON() || {},
|
||||
lastScan: oldLibrary.lastScan || null,
|
||||
lastScanVersion: oldLibrary.lastScanVersion || null,
|
||||
createdAt: oldLibrary.createdAt,
|
||||
updatedAt: oldLibrary.lastUpdate,
|
||||
extraData
|
||||
|
|
|
|||
|
|
@ -69,7 +69,7 @@ class LibraryItem extends Model {
|
|||
*
|
||||
* @param {number} offset
|
||||
* @param {number} limit
|
||||
* @returns {Promise<Model<LibraryItem>[]>} LibraryItem
|
||||
* @returns {Promise<LibraryItem[]>} LibraryItem
|
||||
*/
|
||||
static getLibraryItemsIncrement(offset, limit, where = null) {
|
||||
return this.findAll({
|
||||
|
|
|
|||
|
|
@ -112,6 +112,25 @@ class Podcast extends Model {
|
|||
}
|
||||
}
|
||||
|
||||
getAbsMetadataJson() {
|
||||
return {
|
||||
tags: this.tags || [],
|
||||
title: this.title,
|
||||
author: this.author,
|
||||
description: this.description,
|
||||
releaseDate: this.releaseDate,
|
||||
genres: this.genres || [],
|
||||
feedURL: this.feedURL,
|
||||
imageURL: this.imageURL,
|
||||
itunesPageURL: this.itunesPageURL,
|
||||
itunesId: this.itunesId,
|
||||
itunesArtistId: this.itunesArtistId,
|
||||
language: this.language,
|
||||
explicit: !!this.explicit,
|
||||
podcastType: this.podcastType
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Initialize model
|
||||
* @param {import('../Database').sequelize} sequelize
|
||||
|
|
|
|||
|
|
@ -79,6 +79,7 @@ class PodcastEpisode extends Model {
|
|||
subtitle: this.subtitle,
|
||||
description: this.description,
|
||||
enclosure,
|
||||
guid: this.extraData?.guid || null,
|
||||
pubDate: this.pubDate,
|
||||
chapters: this.chapters,
|
||||
audioFile: this.audioFile,
|
||||
|
|
@ -98,6 +99,9 @@ class PodcastEpisode extends Model {
|
|||
if (oldEpisode.oldEpisodeId) {
|
||||
extraData.oldEpisodeId = oldEpisode.oldEpisodeId
|
||||
}
|
||||
if (oldEpisode.guid) {
|
||||
extraData.guid = oldEpisode.guid
|
||||
}
|
||||
return {
|
||||
id: oldEpisode.id,
|
||||
index: oldEpisode.index,
|
||||
|
|
|
|||
|
|
@ -59,6 +59,7 @@ class User extends Model {
|
|||
id: userExpanded.id,
|
||||
oldUserId: userExpanded.extraData?.oldUserId || null,
|
||||
username: userExpanded.username,
|
||||
email: userExpanded.email || null,
|
||||
pash: userExpanded.pash,
|
||||
type: userExpanded.type,
|
||||
token: userExpanded.token,
|
||||
|
|
@ -96,6 +97,7 @@ class User extends Model {
|
|||
return {
|
||||
id: oldUser.id,
|
||||
username: oldUser.username,
|
||||
email: oldUser.email || null,
|
||||
pash: oldUser.pash || null,
|
||||
type: oldUser.type || null,
|
||||
token: oldUser.token || null,
|
||||
|
|
|
|||
|
|
@ -1,3 +1,4 @@
|
|||
const Path = require('path')
|
||||
const uuidv4 = require("uuid").v4
|
||||
const FeedMeta = require('./FeedMeta')
|
||||
const FeedEpisode = require('./FeedEpisode')
|
||||
|
|
@ -101,11 +102,13 @@ class Feed {
|
|||
this.serverAddress = serverAddress
|
||||
this.feedUrl = feedUrl
|
||||
|
||||
const coverFileExtension = this.coverPath ? Path.extname(media.coverPath) : null
|
||||
|
||||
this.meta = new FeedMeta()
|
||||
this.meta.title = mediaMetadata.title
|
||||
this.meta.description = mediaMetadata.description
|
||||
this.meta.author = author
|
||||
this.meta.imageUrl = media.coverPath ? `${serverAddress}/feed/${slug}/cover` : `${serverAddress}/Logo.png`
|
||||
this.meta.imageUrl = media.coverPath ? `${serverAddress}/feed/${slug}/cover${coverFileExtension}` : `${serverAddress}/Logo.png`
|
||||
this.meta.feedUrl = feedUrl
|
||||
this.meta.link = `${serverAddress}/item/${libraryItem.id}`
|
||||
this.meta.explicit = !!mediaMetadata.explicit
|
||||
|
|
@ -145,10 +148,12 @@ class Feed {
|
|||
this.entityUpdatedAt = libraryItem.updatedAt
|
||||
this.coverPath = media.coverPath || null
|
||||
|
||||
const coverFileExtension = this.coverPath ? Path.extname(media.coverPath) : null
|
||||
|
||||
this.meta.title = mediaMetadata.title
|
||||
this.meta.description = mediaMetadata.description
|
||||
this.meta.author = author
|
||||
this.meta.imageUrl = media.coverPath ? `${this.serverAddress}/feed/${this.slug}/cover` : `${this.serverAddress}/Logo.png`
|
||||
this.meta.imageUrl = media.coverPath ? `${this.serverAddress}/feed/${this.slug}/cover${coverFileExtension}` : `${this.serverAddress}/Logo.png`
|
||||
this.meta.explicit = !!mediaMetadata.explicit
|
||||
this.meta.type = mediaMetadata.type
|
||||
this.meta.language = mediaMetadata.language
|
||||
|
|
@ -174,7 +179,7 @@ class Feed {
|
|||
this.xml = null
|
||||
}
|
||||
|
||||
setFromCollection(userId, slug, collectionExpanded, serverAddress) {
|
||||
setFromCollection(userId, slug, collectionExpanded, serverAddress, preventIndexing = true, ownerName = null, ownerEmail = null) {
|
||||
const feedUrl = `${serverAddress}/feed/${slug}`
|
||||
|
||||
const itemsWithTracks = collectionExpanded.books.filter(libraryItem => libraryItem.media.tracks.length)
|
||||
|
|
@ -190,14 +195,19 @@ class Feed {
|
|||
this.serverAddress = serverAddress
|
||||
this.feedUrl = feedUrl
|
||||
|
||||
const coverFileExtension = this.coverPath ? Path.extname(media.coverPath) : null
|
||||
|
||||
this.meta = new FeedMeta()
|
||||
this.meta.title = collectionExpanded.name
|
||||
this.meta.description = collectionExpanded.description || ''
|
||||
this.meta.author = this.getAuthorsStringFromLibraryItems(itemsWithTracks)
|
||||
this.meta.imageUrl = this.coverPath ? `${serverAddress}/feed/${slug}/cover` : `${serverAddress}/Logo.png`
|
||||
this.meta.imageUrl = this.coverPath ? `${serverAddress}/feed/${slug}/cover${coverFileExtension}` : `${serverAddress}/Logo.png`
|
||||
this.meta.feedUrl = feedUrl
|
||||
this.meta.link = `${serverAddress}/collection/${collectionExpanded.id}`
|
||||
this.meta.explicit = !!itemsWithTracks.some(li => li.media.metadata.explicit) // explicit if any item is explicit
|
||||
this.meta.preventIndexing = preventIndexing
|
||||
this.meta.ownerName = ownerName
|
||||
this.meta.ownerEmail = ownerEmail
|
||||
|
||||
this.episodes = []
|
||||
|
||||
|
|
@ -222,10 +232,12 @@ class Feed {
|
|||
this.entityUpdatedAt = collectionExpanded.lastUpdate
|
||||
this.coverPath = firstItemWithCover?.coverPath || null
|
||||
|
||||
const coverFileExtension = this.coverPath ? Path.extname(media.coverPath) : null
|
||||
|
||||
this.meta.title = collectionExpanded.name
|
||||
this.meta.description = collectionExpanded.description || ''
|
||||
this.meta.author = this.getAuthorsStringFromLibraryItems(itemsWithTracks)
|
||||
this.meta.imageUrl = this.coverPath ? `${this.serverAddress}/feed/${this.slug}/cover` : `${this.serverAddress}/Logo.png`
|
||||
this.meta.imageUrl = this.coverPath ? `${this.serverAddress}/feed/${this.slug}/cover${coverFileExtension}` : `${this.serverAddress}/Logo.png`
|
||||
this.meta.explicit = !!itemsWithTracks.some(li => li.media.metadata.explicit) // explicit if any item is explicit
|
||||
|
||||
this.episodes = []
|
||||
|
|
@ -244,7 +256,7 @@ class Feed {
|
|||
this.xml = null
|
||||
}
|
||||
|
||||
setFromSeries(userId, slug, seriesExpanded, serverAddress) {
|
||||
setFromSeries(userId, slug, seriesExpanded, serverAddress, preventIndexing = true, ownerName = null, ownerEmail = null) {
|
||||
const feedUrl = `${serverAddress}/feed/${slug}`
|
||||
|
||||
let itemsWithTracks = seriesExpanded.books.filter(libraryItem => libraryItem.media.tracks.length)
|
||||
|
|
@ -264,14 +276,19 @@ class Feed {
|
|||
this.serverAddress = serverAddress
|
||||
this.feedUrl = feedUrl
|
||||
|
||||
const coverFileExtension = this.coverPath ? Path.extname(media.coverPath) : null
|
||||
|
||||
this.meta = new FeedMeta()
|
||||
this.meta.title = seriesExpanded.name
|
||||
this.meta.description = seriesExpanded.description || ''
|
||||
this.meta.author = this.getAuthorsStringFromLibraryItems(itemsWithTracks)
|
||||
this.meta.imageUrl = this.coverPath ? `${serverAddress}/feed/${slug}/cover` : `${serverAddress}/Logo.png`
|
||||
this.meta.imageUrl = this.coverPath ? `${serverAddress}/feed/${slug}/cover${coverFileExtension}` : `${serverAddress}/Logo.png`
|
||||
this.meta.feedUrl = feedUrl
|
||||
this.meta.link = `${serverAddress}/library/${libraryId}/series/${seriesExpanded.id}`
|
||||
this.meta.explicit = !!itemsWithTracks.some(li => li.media.metadata.explicit) // explicit if any item is explicit
|
||||
this.meta.preventIndexing = preventIndexing
|
||||
this.meta.ownerName = ownerName
|
||||
this.meta.ownerEmail = ownerEmail
|
||||
|
||||
this.episodes = []
|
||||
|
||||
|
|
@ -299,10 +316,12 @@ class Feed {
|
|||
this.entityUpdatedAt = seriesExpanded.updatedAt
|
||||
this.coverPath = firstItemWithCover?.coverPath || null
|
||||
|
||||
const coverFileExtension = this.coverPath ? Path.extname(media.coverPath) : null
|
||||
|
||||
this.meta.title = seriesExpanded.name
|
||||
this.meta.description = seriesExpanded.description || ''
|
||||
this.meta.author = this.getAuthorsStringFromLibraryItems(itemsWithTracks)
|
||||
this.meta.imageUrl = this.coverPath ? `${this.serverAddress}/feed/${this.slug}/cover` : `${this.serverAddress}/Logo.png`
|
||||
this.meta.imageUrl = this.coverPath ? `${this.serverAddress}/feed/${this.slug}/cover${coverFileExtension}` : `${this.serverAddress}/Logo.png`
|
||||
this.meta.explicit = !!itemsWithTracks.some(li => li.media.metadata.explicit) // explicit if any item is explicit
|
||||
|
||||
this.episodes = []
|
||||
|
|
|
|||
|
|
@ -1,3 +1,4 @@
|
|||
const Path = require('path')
|
||||
const uuidv4 = require("uuid").v4
|
||||
const date = require('../libs/dateAndTime')
|
||||
const { secondsToTimestamp } = require('../utils/index')
|
||||
|
|
@ -69,7 +70,8 @@ class FeedEpisode {
|
|||
}
|
||||
|
||||
setFromPodcastEpisode(libraryItem, serverAddress, slug, episode, meta) {
|
||||
const contentUrl = `/feed/${slug}/item/${episode.id}/${episode.audioFile.metadata.filename}`
|
||||
const contentFileExtension = Path.extname(episode.audioFile.metadata.filename)
|
||||
const contentUrl = `/feed/${slug}/item/${episode.id}/media${contentFileExtension}`
|
||||
const media = libraryItem.media
|
||||
const mediaMetadata = media.metadata
|
||||
|
||||
|
|
@ -108,7 +110,8 @@ class FeedEpisode {
|
|||
// e.g. Track 1 will have a pub date before Track 2
|
||||
const audiobookPubDate = date.format(new Date(libraryItem.addedAt + timeOffset), 'ddd, DD MMM YYYY HH:mm:ss [GMT]')
|
||||
|
||||
const contentUrl = `/feed/${slug}/item/${episodeId}/${audioTrack.metadata.filename}`
|
||||
const contentFileExtension = Path.extname(audioTrack.metadata.filename)
|
||||
const contentUrl = `/feed/${slug}/item/${episodeId}/media${contentFileExtension}`
|
||||
const media = libraryItem.media
|
||||
const mediaMetadata = media.metadata
|
||||
|
||||
|
|
|
|||
|
|
@ -15,6 +15,9 @@ class Library {
|
|||
this.provider = 'google'
|
||||
|
||||
this.lastScan = 0
|
||||
this.lastScanVersion = null
|
||||
this.lastScanMetadataPrecedence = null
|
||||
|
||||
this.settings = null
|
||||
|
||||
this.createdAt = null
|
||||
|
|
@ -53,6 +56,10 @@ class Library {
|
|||
this.settings.disableWatcher = !!library.disableWatcher
|
||||
}
|
||||
|
||||
this.lastScan = library.lastScan
|
||||
this.lastScanVersion = library.lastScanVersion
|
||||
this.lastScanMetadataPrecedence = library.lastScanMetadataPrecedence
|
||||
|
||||
this.createdAt = library.createdAt
|
||||
this.lastUpdate = library.lastUpdate
|
||||
this.cleanOldValues() // mediaType changed for v2 and icon change for v2.2.2
|
||||
|
|
@ -84,6 +91,8 @@ class Library {
|
|||
mediaType: this.mediaType,
|
||||
provider: this.provider,
|
||||
settings: this.settings.toJSON(),
|
||||
lastScan: this.lastScan,
|
||||
lastScanVersion: this.lastScanVersion,
|
||||
createdAt: this.createdAt,
|
||||
lastUpdate: this.lastUpdate
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,15 +1,13 @@
|
|||
const uuidv4 = require("uuid").v4
|
||||
const fs = require('../libs/fsExtra')
|
||||
const Path = require('path')
|
||||
const { version } = require('../../package.json')
|
||||
const Logger = require('../Logger')
|
||||
const abmetadataGenerator = require('../utils/generators/abmetadataGenerator')
|
||||
const LibraryFile = require('./files/LibraryFile')
|
||||
const Book = require('./mediaTypes/Book')
|
||||
const Podcast = require('./mediaTypes/Podcast')
|
||||
const Video = require('./mediaTypes/Video')
|
||||
const Music = require('./mediaTypes/Music')
|
||||
const { areEquivalent, copyValue, cleanStringForSearch } = require('../utils/index')
|
||||
const { areEquivalent, copyValue } = require('../utils/index')
|
||||
const { filePathToPOSIX, getFileTimestampsWithIno } = require('../utils/fileUtils')
|
||||
|
||||
class LibraryItem {
|
||||
|
|
@ -180,34 +178,23 @@ class LibraryItem {
|
|||
this.libraryFiles.forEach((lf) => total += lf.metadata.size)
|
||||
return total
|
||||
}
|
||||
get audioFileTotalSize() {
|
||||
let total = 0
|
||||
this.libraryFiles.filter(lf => lf.fileType == 'audio').forEach((lf) => total += lf.metadata.size)
|
||||
return total
|
||||
}
|
||||
get hasAudioFiles() {
|
||||
return this.libraryFiles.some(lf => lf.fileType === 'audio')
|
||||
}
|
||||
get hasMediaEntities() {
|
||||
return this.media.hasMediaEntities
|
||||
}
|
||||
get hasIssues() {
|
||||
if (this.isMissing || this.isInvalid) return true
|
||||
return this.media.hasIssues
|
||||
}
|
||||
|
||||
// Data comes from scandir library item data
|
||||
// TODO: Remove this function. Only used when creating a new podcast now
|
||||
setData(libraryMediaType, payload) {
|
||||
this.id = uuidv4()
|
||||
this.mediaType = libraryMediaType
|
||||
if (libraryMediaType === 'video') {
|
||||
this.media = new Video()
|
||||
} else if (libraryMediaType === 'podcast') {
|
||||
if (libraryMediaType === 'podcast') {
|
||||
this.media = new Podcast()
|
||||
} else if (libraryMediaType === 'book') {
|
||||
this.media = new Book()
|
||||
} else if (libraryMediaType === 'music') {
|
||||
this.media = new Music()
|
||||
} else {
|
||||
Logger.error(`[LibraryItem] setData called with unsupported media type "${libraryMediaType}"`)
|
||||
return
|
||||
}
|
||||
this.media.id = uuidv4()
|
||||
this.media.libraryItemId = this.id
|
||||
|
|
@ -270,85 +257,13 @@ class LibraryItem {
|
|||
this.updatedAt = Date.now()
|
||||
}
|
||||
|
||||
setInvalid() {
|
||||
this.isInvalid = true
|
||||
this.updatedAt = Date.now()
|
||||
}
|
||||
|
||||
setLastScan() {
|
||||
this.lastScan = Date.now()
|
||||
this.updatedAt = Date.now()
|
||||
this.scanVersion = version
|
||||
}
|
||||
|
||||
// Returns null if file not found, true if file was updated, false if up to date
|
||||
// updates existing LibraryFile, AudioFile, EBookFile's
|
||||
checkFileFound(fileFound) {
|
||||
let hasUpdated = false
|
||||
|
||||
let existingFile = this.libraryFiles.find(lf => lf.ino === fileFound.ino)
|
||||
let mediaFile = null
|
||||
if (!existingFile) {
|
||||
existingFile = this.libraryFiles.find(lf => lf.metadata.path === fileFound.metadata.path)
|
||||
if (existingFile) {
|
||||
// Update media file ino
|
||||
mediaFile = this.media.findFileWithInode(existingFile.ino)
|
||||
if (mediaFile) {
|
||||
mediaFile.ino = fileFound.ino
|
||||
}
|
||||
|
||||
// file inode was updated
|
||||
existingFile.ino = fileFound.ino
|
||||
hasUpdated = true
|
||||
} else {
|
||||
// file not found
|
||||
return null
|
||||
}
|
||||
} else {
|
||||
mediaFile = this.media.findFileWithInode(existingFile.ino)
|
||||
}
|
||||
|
||||
if (existingFile.metadata.path !== fileFound.metadata.path) {
|
||||
existingFile.metadata.path = fileFound.metadata.path
|
||||
existingFile.metadata.relPath = fileFound.metadata.relPath
|
||||
if (mediaFile) {
|
||||
mediaFile.metadata.path = fileFound.metadata.path
|
||||
mediaFile.metadata.relPath = fileFound.metadata.relPath
|
||||
}
|
||||
hasUpdated = true
|
||||
}
|
||||
|
||||
// FileMetadata keys
|
||||
['filename', 'ext', 'mtimeMs', 'ctimeMs', 'birthtimeMs', 'size'].forEach((key) => {
|
||||
if (existingFile.metadata[key] !== fileFound.metadata[key]) {
|
||||
// Add modified flag on file data object if exists and was changed
|
||||
if (key === 'mtimeMs' && existingFile.metadata[key]) {
|
||||
fileFound.metadata.wasModified = true
|
||||
}
|
||||
|
||||
existingFile.metadata[key] = fileFound.metadata[key]
|
||||
if (mediaFile) {
|
||||
if (key === 'mtimeMs') mediaFile.metadata.wasModified = true
|
||||
mediaFile.metadata[key] = fileFound.metadata[key]
|
||||
}
|
||||
hasUpdated = true
|
||||
}
|
||||
})
|
||||
|
||||
return hasUpdated
|
||||
}
|
||||
|
||||
searchQuery(query) {
|
||||
query = cleanStringForSearch(query)
|
||||
return this.media.searchQuery(query)
|
||||
}
|
||||
|
||||
getDirectPlayTracklist(episodeId) {
|
||||
return this.media.getDirectPlayTracklist(episodeId)
|
||||
}
|
||||
|
||||
/**
|
||||
* Save metadata.json/metadata.abs file
|
||||
* Save metadata.json file
|
||||
* TODO: Move to new LibraryItem model
|
||||
* @returns {Promise<LibraryFile>} null if not saved
|
||||
*/
|
||||
async saveMetadata() {
|
||||
|
|
@ -366,91 +281,41 @@ class LibraryItem {
|
|||
await fs.ensureDir(metadataPath)
|
||||
}
|
||||
|
||||
const metadataFileFormat = global.ServerSettings.metadataFileFormat
|
||||
const metadataFilePath = Path.join(metadataPath, `metadata.${metadataFileFormat}`)
|
||||
if (metadataFileFormat === 'json') {
|
||||
// Remove metadata.abs if it exists
|
||||
if (await fs.pathExists(Path.join(metadataPath, `metadata.abs`))) {
|
||||
Logger.debug(`[LibraryItem] Removing metadata.abs for item "${this.media.metadata.title}"`)
|
||||
await fs.remove(Path.join(metadataPath, `metadata.abs`))
|
||||
this.libraryFiles = this.libraryFiles.filter(lf => lf.metadata.path !== filePathToPOSIX(Path.join(metadataPath, `metadata.abs`)))
|
||||
const metadataFilePath = Path.join(metadataPath, `metadata.${global.ServerSettings.metadataFileFormat}`)
|
||||
|
||||
return fs.writeFile(metadataFilePath, JSON.stringify(this.media.toJSONForMetadataFile(), null, 2)).then(async () => {
|
||||
// Add metadata.json to libraryFiles array if it is new
|
||||
let metadataLibraryFile = this.libraryFiles.find(lf => lf.metadata.path === filePathToPOSIX(metadataFilePath))
|
||||
if (storeMetadataWithItem) {
|
||||
if (!metadataLibraryFile) {
|
||||
metadataLibraryFile = new LibraryFile()
|
||||
await metadataLibraryFile.setDataFromPath(metadataFilePath, `metadata.json`)
|
||||
this.libraryFiles.push(metadataLibraryFile)
|
||||
} else {
|
||||
const fileTimestamps = await getFileTimestampsWithIno(metadataFilePath)
|
||||
if (fileTimestamps) {
|
||||
metadataLibraryFile.metadata.mtimeMs = fileTimestamps.mtimeMs
|
||||
metadataLibraryFile.metadata.ctimeMs = fileTimestamps.ctimeMs
|
||||
metadataLibraryFile.metadata.size = fileTimestamps.size
|
||||
metadataLibraryFile.ino = fileTimestamps.ino
|
||||
}
|
||||
}
|
||||
const libraryItemDirTimestamps = await getFileTimestampsWithIno(this.path)
|
||||
if (libraryItemDirTimestamps) {
|
||||
this.mtimeMs = libraryItemDirTimestamps.mtimeMs
|
||||
this.ctimeMs = libraryItemDirTimestamps.ctimeMs
|
||||
}
|
||||
}
|
||||
|
||||
return fs.writeFile(metadataFilePath, JSON.stringify(this.media.toJSONForMetadataFile(), null, 2)).then(async () => {
|
||||
// Add metadata.json to libraryFiles array if it is new
|
||||
let metadataLibraryFile = this.libraryFiles.find(lf => lf.metadata.path === filePathToPOSIX(metadataFilePath))
|
||||
if (storeMetadataWithItem) {
|
||||
if (!metadataLibraryFile) {
|
||||
metadataLibraryFile = new LibraryFile()
|
||||
await metadataLibraryFile.setDataFromPath(metadataFilePath, `metadata.json`)
|
||||
this.libraryFiles.push(metadataLibraryFile)
|
||||
} else {
|
||||
const fileTimestamps = await getFileTimestampsWithIno(metadataFilePath)
|
||||
if (fileTimestamps) {
|
||||
metadataLibraryFile.metadata.mtimeMs = fileTimestamps.mtimeMs
|
||||
metadataLibraryFile.metadata.ctimeMs = fileTimestamps.ctimeMs
|
||||
metadataLibraryFile.metadata.size = fileTimestamps.size
|
||||
metadataLibraryFile.ino = fileTimestamps.ino
|
||||
}
|
||||
}
|
||||
const libraryItemDirTimestamps = await getFileTimestampsWithIno(this.path)
|
||||
if (libraryItemDirTimestamps) {
|
||||
this.mtimeMs = libraryItemDirTimestamps.mtimeMs
|
||||
this.ctimeMs = libraryItemDirTimestamps.ctimeMs
|
||||
}
|
||||
}
|
||||
Logger.debug(`[LibraryItem] Success saving abmetadata to "${metadataFilePath}"`)
|
||||
|
||||
Logger.debug(`[LibraryItem] Success saving abmetadata to "${metadataFilePath}"`)
|
||||
|
||||
return metadataLibraryFile
|
||||
}).catch((error) => {
|
||||
Logger.error(`[LibraryItem] Failed to save json file at "${metadataFilePath}"`, error)
|
||||
return null
|
||||
}).finally(() => {
|
||||
this.isSavingMetadata = false
|
||||
})
|
||||
} else {
|
||||
// Remove metadata.json if it exists
|
||||
if (await fs.pathExists(Path.join(metadataPath, `metadata.json`))) {
|
||||
Logger.debug(`[LibraryItem] Removing metadata.json for item "${this.media.metadata.title}"`)
|
||||
await fs.remove(Path.join(metadataPath, `metadata.json`))
|
||||
this.libraryFiles = this.libraryFiles.filter(lf => lf.metadata.path !== filePathToPOSIX(Path.join(metadataPath, `metadata.json`)))
|
||||
}
|
||||
|
||||
return abmetadataGenerator.generate(this, metadataFilePath).then(async (success) => {
|
||||
if (!success) {
|
||||
Logger.error(`[LibraryItem] Failed saving abmetadata to "${metadataFilePath}"`)
|
||||
return null
|
||||
}
|
||||
// Add metadata.abs to libraryFiles array if it is new
|
||||
let metadataLibraryFile = this.libraryFiles.find(lf => lf.metadata.path === filePathToPOSIX(metadataFilePath))
|
||||
if (storeMetadataWithItem) {
|
||||
if (!metadataLibraryFile) {
|
||||
metadataLibraryFile = new LibraryFile()
|
||||
await metadataLibraryFile.setDataFromPath(metadataFilePath, `metadata.abs`)
|
||||
this.libraryFiles.push(metadataLibraryFile)
|
||||
} else {
|
||||
const fileTimestamps = await getFileTimestampsWithIno(metadataFilePath)
|
||||
if (fileTimestamps) {
|
||||
metadataLibraryFile.metadata.mtimeMs = fileTimestamps.mtimeMs
|
||||
metadataLibraryFile.metadata.ctimeMs = fileTimestamps.ctimeMs
|
||||
metadataLibraryFile.metadata.size = fileTimestamps.size
|
||||
metadataLibraryFile.ino = fileTimestamps.ino
|
||||
}
|
||||
}
|
||||
const libraryItemDirTimestamps = await getFileTimestampsWithIno(this.path)
|
||||
if (libraryItemDirTimestamps) {
|
||||
this.mtimeMs = libraryItemDirTimestamps.mtimeMs
|
||||
this.ctimeMs = libraryItemDirTimestamps.ctimeMs
|
||||
}
|
||||
}
|
||||
|
||||
Logger.debug(`[LibraryItem] Success saving abmetadata to "${metadataFilePath}"`)
|
||||
return metadataLibraryFile
|
||||
}).finally(() => {
|
||||
this.isSavingMetadata = false
|
||||
})
|
||||
}
|
||||
return metadataLibraryFile
|
||||
}).catch((error) => {
|
||||
Logger.error(`[LibraryItem] Failed to save json file at "${metadataFilePath}"`, error)
|
||||
return null
|
||||
}).finally(() => {
|
||||
this.isSavingMetadata = false
|
||||
})
|
||||
}
|
||||
|
||||
removeLibraryFile(ino) {
|
||||
|
|
|
|||
|
|
@ -101,7 +101,6 @@ class Stream extends EventEmitter {
|
|||
return 'mpegts'
|
||||
}
|
||||
get segmentBasename() {
|
||||
if (this.hlsSegmentType === 'fmp4') return 'output-%d.m4s'
|
||||
return 'output-%d.ts'
|
||||
}
|
||||
get segmentStartNumber() {
|
||||
|
|
@ -142,19 +141,21 @@ class Stream extends EventEmitter {
|
|||
|
||||
async checkSegmentNumberRequest(segNum) {
|
||||
const segStartTime = segNum * this.segmentLength
|
||||
if (this.startTime > segStartTime) {
|
||||
Logger.warn(`[STREAM] Segment #${segNum} Request @${secondsToTimestamp(segStartTime)} is before start time (${secondsToTimestamp(this.startTime)}) - Reset Transcode`)
|
||||
await this.reset(segStartTime - (this.segmentLength * 2))
|
||||
if (this.segmentStartNumber > segNum) {
|
||||
Logger.warn(`[STREAM] Segment #${segNum} Request is before starting segment number #${this.segmentStartNumber} - Reset Transcode`)
|
||||
await this.reset(segStartTime - (this.segmentLength * 5))
|
||||
return segStartTime
|
||||
} else if (this.isTranscodeComplete) {
|
||||
return false
|
||||
}
|
||||
|
||||
const distanceFromFurthestSegment = segNum - this.furthestSegmentCreated
|
||||
if (distanceFromFurthestSegment > 10) {
|
||||
Logger.info(`Segment #${segNum} requested is ${distanceFromFurthestSegment} segments from latest (${secondsToTimestamp(segStartTime)}) - Reset Transcode`)
|
||||
await this.reset(segStartTime - (this.segmentLength * 2))
|
||||
return segStartTime
|
||||
if (this.furthestSegmentCreated) {
|
||||
const distanceFromFurthestSegment = segNum - this.furthestSegmentCreated
|
||||
if (distanceFromFurthestSegment > 10) {
|
||||
Logger.info(`Segment #${segNum} requested is ${distanceFromFurthestSegment} segments from latest (${secondsToTimestamp(segStartTime)}) - Reset Transcode`)
|
||||
await this.reset(segStartTime - (this.segmentLength * 5))
|
||||
return segStartTime
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
|
|
@ -171,7 +172,7 @@ class Stream extends EventEmitter {
|
|||
var files = await fs.readdir(this.streamPath)
|
||||
files.forEach((file) => {
|
||||
var extname = Path.extname(file)
|
||||
if (extname === '.ts' || extname === '.m4s') {
|
||||
if (extname === '.ts') {
|
||||
var basename = Path.basename(file, extname)
|
||||
var num_part = basename.split('-')[1]
|
||||
var part_num = Number(num_part)
|
||||
|
|
@ -251,6 +252,7 @@ class Stream extends EventEmitter {
|
|||
Logger.info(`[STREAM] START STREAM - Num Segments: ${this.numSegments}`)
|
||||
|
||||
this.ffmpeg = Ffmpeg()
|
||||
this.furthestSegmentCreated = 0
|
||||
|
||||
var adjustedStartTime = Math.max(this.startTime - this.maxSeekBackTime, 0)
|
||||
var trackStartTime = await writeConcatFile(this.tracks, this.concatFilesPath, adjustedStartTime)
|
||||
|
|
@ -339,9 +341,9 @@ class Stream extends EventEmitter {
|
|||
} else {
|
||||
Logger.error('Ffmpeg Err', '"' + err.message + '"')
|
||||
|
||||
// Temporary workaround for https://github.com/advplyr/audiobookshelf/issues/172
|
||||
const aacErrorMsg = 'ffmpeg exited with code 1: Could not write header for output file #0 (incorrect codec parameters ?)'
|
||||
if (audioCodec === 'copy' && this.isAACEncodable && err.message && err.message.startsWith(aacErrorMsg)) {
|
||||
// Temporary workaround for https://github.com/advplyr/audiobookshelf/issues/172 and https://github.com/advplyr/audiobookshelf/issues/2157
|
||||
const aacErrorMsg = 'ffmpeg exited with code 1:'
|
||||
if (audioCodec === 'copy' && this.isAACEncodable && err.message?.startsWith(aacErrorMsg)) {
|
||||
Logger.info(`[Stream] Re-attempting stream with AAC encode`)
|
||||
this.transcodeOptions.forceAAC = true
|
||||
this.reset(this.startTime)
|
||||
|
|
@ -435,4 +437,4 @@ class Stream extends EventEmitter {
|
|||
return newAudioTrack
|
||||
}
|
||||
}
|
||||
module.exports = Stream
|
||||
module.exports = Stream
|
||||
|
|
|
|||
|
|
@ -2,19 +2,30 @@ const uuidv4 = require("uuid").v4
|
|||
|
||||
class Task {
|
||||
constructor() {
|
||||
/** @type {string} */
|
||||
this.id = null
|
||||
/** @type {string} */
|
||||
this.action = null // e.g. embed-metadata, encode-m4b, etc
|
||||
/** @type {Object} custom data */
|
||||
this.data = null // additional info for the action like libraryItemId
|
||||
|
||||
/** @type {string} */
|
||||
this.title = null
|
||||
/** @type {string} */
|
||||
this.description = null
|
||||
/** @type {string} */
|
||||
this.error = null
|
||||
this.showSuccess = false // If true client side should keep the task visible after success
|
||||
/** @type {boolean} client should keep the task visible after success */
|
||||
this.showSuccess = false
|
||||
|
||||
/** @type {boolean} */
|
||||
this.isFailed = false
|
||||
/** @type {boolean} */
|
||||
this.isFinished = false
|
||||
|
||||
/** @type {number} */
|
||||
this.startedAt = null
|
||||
/** @type {number} */
|
||||
this.finishedAt = null
|
||||
}
|
||||
|
||||
|
|
@ -34,6 +45,15 @@ class Task {
|
|||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Set initial task data
|
||||
*
|
||||
* @param {string} action
|
||||
* @param {string} title
|
||||
* @param {string} description
|
||||
* @param {boolean} showSuccess
|
||||
* @param {Object} [data]
|
||||
*/
|
||||
setData(action, title, description, showSuccess, data = {}) {
|
||||
this.id = uuidv4()
|
||||
this.action = action
|
||||
|
|
@ -44,6 +64,11 @@ class Task {
|
|||
this.startedAt = Date.now()
|
||||
}
|
||||
|
||||
/**
|
||||
* Set task as failed
|
||||
*
|
||||
* @param {string} message error message
|
||||
*/
|
||||
setFailed(message) {
|
||||
this.error = message
|
||||
this.isFailed = true
|
||||
|
|
@ -51,6 +76,11 @@ class Task {
|
|||
this.setFinished()
|
||||
}
|
||||
|
||||
/**
|
||||
* Set task as finished
|
||||
*
|
||||
* @param {string} [newDescription] update description
|
||||
*/
|
||||
setFinished(newDescription = null) {
|
||||
if (newDescription) {
|
||||
this.description = newDescription
|
||||
|
|
|
|||
|
|
@ -1,7 +1,5 @@
|
|||
const uuidv4 = require("uuid").v4
|
||||
const Path = require('path')
|
||||
const Logger = require('../../Logger')
|
||||
const { cleanStringForSearch, areEquivalent, copyValue } = require('../../utils/index')
|
||||
const { areEquivalent, copyValue } = require('../../utils/index')
|
||||
const AudioFile = require('../files/AudioFile')
|
||||
const AudioTrack = require('../files/AudioTrack')
|
||||
|
||||
|
|
@ -20,6 +18,7 @@ class PodcastEpisode {
|
|||
this.subtitle = null
|
||||
this.description = null
|
||||
this.enclosure = null
|
||||
this.guid = null
|
||||
this.pubDate = null
|
||||
this.chapters = []
|
||||
|
||||
|
|
@ -46,6 +45,7 @@ class PodcastEpisode {
|
|||
this.subtitle = episode.subtitle
|
||||
this.description = episode.description
|
||||
this.enclosure = episode.enclosure ? { ...episode.enclosure } : null
|
||||
this.guid = episode.guid || null
|
||||
this.pubDate = episode.pubDate
|
||||
this.chapters = episode.chapters?.map(ch => ({ ...ch })) || []
|
||||
this.audioFile = new AudioFile(episode.audioFile)
|
||||
|
|
@ -70,6 +70,7 @@ class PodcastEpisode {
|
|||
subtitle: this.subtitle,
|
||||
description: this.description,
|
||||
enclosure: this.enclosure ? { ...this.enclosure } : null,
|
||||
guid: this.guid,
|
||||
pubDate: this.pubDate,
|
||||
chapters: this.chapters.map(ch => ({ ...ch })),
|
||||
audioFile: this.audioFile.toJSON(),
|
||||
|
|
@ -93,6 +94,7 @@ class PodcastEpisode {
|
|||
subtitle: this.subtitle,
|
||||
description: this.description,
|
||||
enclosure: this.enclosure ? { ...this.enclosure } : null,
|
||||
guid: this.guid,
|
||||
pubDate: this.pubDate,
|
||||
chapters: this.chapters.map(ch => ({ ...ch })),
|
||||
audioFile: this.audioFile.toJSON(),
|
||||
|
|
@ -133,6 +135,7 @@ class PodcastEpisode {
|
|||
this.pubDate = data.pubDate || ''
|
||||
this.description = data.description || ''
|
||||
this.enclosure = data.enclosure ? { ...data.enclosure } : null
|
||||
this.guid = data.guid || null
|
||||
this.season = data.season || ''
|
||||
this.episode = data.episode || ''
|
||||
this.episodeType = data.episodeType || 'full'
|
||||
|
|
@ -141,19 +144,6 @@ class PodcastEpisode {
|
|||
this.updatedAt = Date.now()
|
||||
}
|
||||
|
||||
setDataFromAudioFile(audioFile, index) {
|
||||
this.id = uuidv4()
|
||||
this.audioFile = audioFile
|
||||
this.title = Path.basename(audioFile.metadata.filename, Path.extname(audioFile.metadata.filename))
|
||||
this.index = index
|
||||
|
||||
this.setDataFromAudioMetaTags(audioFile.metaTags, true)
|
||||
|
||||
this.chapters = audioFile.chapters?.map((c) => ({ ...c }))
|
||||
this.addedAt = Date.now()
|
||||
this.updatedAt = Date.now()
|
||||
}
|
||||
|
||||
update(payload) {
|
||||
let hasUpdates = false
|
||||
for (const key in this.toJSON()) {
|
||||
|
|
@ -187,80 +177,5 @@ class PodcastEpisode {
|
|||
if (!this.enclosure || !this.enclosure.url) return false
|
||||
return this.enclosure.url == url
|
||||
}
|
||||
|
||||
searchQuery(query) {
|
||||
return cleanStringForSearch(this.title).includes(query)
|
||||
}
|
||||
|
||||
setDataFromAudioMetaTags(audioFileMetaTags, overrideExistingDetails = false) {
|
||||
if (!audioFileMetaTags) return false
|
||||
|
||||
const MetadataMapArray = [
|
||||
{
|
||||
tag: 'tagComment',
|
||||
altTag: 'tagSubtitle',
|
||||
key: 'description'
|
||||
},
|
||||
{
|
||||
tag: 'tagSubtitle',
|
||||
key: 'subtitle'
|
||||
},
|
||||
{
|
||||
tag: 'tagDate',
|
||||
key: 'pubDate'
|
||||
},
|
||||
{
|
||||
tag: 'tagDisc',
|
||||
key: 'season',
|
||||
},
|
||||
{
|
||||
tag: 'tagTrack',
|
||||
altTag: 'tagSeriesPart',
|
||||
key: 'episode'
|
||||
},
|
||||
{
|
||||
tag: 'tagTitle',
|
||||
key: 'title'
|
||||
},
|
||||
{
|
||||
tag: 'tagEpisodeType',
|
||||
key: 'episodeType'
|
||||
}
|
||||
]
|
||||
|
||||
MetadataMapArray.forEach((mapping) => {
|
||||
let value = audioFileMetaTags[mapping.tag]
|
||||
let tagToUse = mapping.tag
|
||||
if (!value && mapping.altTag) {
|
||||
tagToUse = mapping.altTag
|
||||
value = audioFileMetaTags[mapping.altTag]
|
||||
}
|
||||
|
||||
if (value && typeof value === 'string') {
|
||||
value = value.trim() // Trim whitespace
|
||||
|
||||
if (mapping.key === 'pubDate' && (!this.pubDate || overrideExistingDetails)) {
|
||||
const pubJsDate = new Date(value)
|
||||
if (pubJsDate && !isNaN(pubJsDate)) {
|
||||
this.publishedAt = pubJsDate.valueOf()
|
||||
this.pubDate = value
|
||||
Logger.debug(`[PodcastEpisode] Mapping metadata to key ${tagToUse} => ${mapping.key}: ${this[mapping.key]}`)
|
||||
} else {
|
||||
Logger.warn(`[PodcastEpisode] Mapping pubDate with tag ${tagToUse} has invalid date "${value}"`)
|
||||
}
|
||||
} else if (mapping.key === 'episodeType' && (!this.episodeType || overrideExistingDetails)) {
|
||||
if (['full', 'trailer', 'bonus'].includes(value)) {
|
||||
this.episodeType = value
|
||||
Logger.debug(`[PodcastEpisode] Mapping metadata to key ${tagToUse} => ${mapping.key}: ${this[mapping.key]}`)
|
||||
} else {
|
||||
Logger.warn(`[PodcastEpisode] Mapping episodeType with invalid value "${value}". Must be one of [full, trailer, bonus].`)
|
||||
}
|
||||
} else if (!this[mapping.key] || overrideExistingDetails) {
|
||||
this[mapping.key] = value
|
||||
Logger.debug(`[PodcastEpisode] Mapping metadata to key ${tagToUse} => ${mapping.key}: ${this[mapping.key]}`)
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
module.exports = PodcastEpisode
|
||||
|
|
|
|||
|
|
@ -1,11 +1,7 @@
|
|||
const Path = require('path')
|
||||
const Logger = require('../../Logger')
|
||||
const BookMetadata = require('../metadata/BookMetadata')
|
||||
const { areEquivalent, copyValue, cleanStringForSearch } = require('../../utils/index')
|
||||
const { parseOpfMetadataXML } = require('../../utils/parsers/parseOpfMetadata')
|
||||
const { parseOverdriveMediaMarkersAsChapters } = require('../../utils/parsers/parseOverdriveMediaMarkers')
|
||||
const abmetadataGenerator = require('../../utils/generators/abmetadataGenerator')
|
||||
const { readTextFile, filePathToPOSIX } = require('../../utils/fileUtils')
|
||||
const { areEquivalent, copyValue } = require('../../utils/index')
|
||||
const { filePathToPOSIX } = require('../../utils/fileUtils')
|
||||
const AudioFile = require('../files/AudioFile')
|
||||
const AudioTrack = require('../files/AudioTrack')
|
||||
const EBookFile = require('../files/EBookFile')
|
||||
|
|
@ -98,7 +94,7 @@ class Book {
|
|||
return {
|
||||
tags: [...this.tags],
|
||||
chapters: this.chapters.map(c => ({ ...c })),
|
||||
metadata: this.metadata.toJSONForMetadataFile()
|
||||
...this.metadata.toJSONForMetadataFile()
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -113,23 +109,12 @@ class Book {
|
|||
get hasMediaEntities() {
|
||||
return !!this.tracks.length || this.ebookFile
|
||||
}
|
||||
get shouldSearchForCover() {
|
||||
if (this.coverPath) return false
|
||||
if (!this.lastCoverSearch || this.metadata.coverSearchQuery !== this.lastCoverSearchQuery) return true
|
||||
return (Date.now() - this.lastCoverSearch) > 1000 * 60 * 60 * 24 * 7 // 7 day
|
||||
}
|
||||
get hasEmbeddedCoverArt() {
|
||||
return this.audioFiles.some(af => af.embeddedCoverArt)
|
||||
}
|
||||
get invalidAudioFiles() {
|
||||
return this.audioFiles.filter(af => af.invalid)
|
||||
}
|
||||
get includedAudioFiles() {
|
||||
return this.audioFiles.filter(af => !af.exclude && !af.invalid)
|
||||
}
|
||||
get hasIssues() {
|
||||
return this.missingParts.length || this.invalidAudioFiles.length
|
||||
}
|
||||
get tracks() {
|
||||
let startOffset = 0
|
||||
return this.includedAudioFiles.map((af) => {
|
||||
|
|
@ -228,159 +213,6 @@ class Book {
|
|||
return null
|
||||
}
|
||||
|
||||
updateLastCoverSearch(coverWasFound) {
|
||||
this.lastCoverSearch = coverWasFound ? null : Date.now()
|
||||
this.lastCoverSearchQuery = coverWasFound ? null : this.metadata.coverSearchQuery
|
||||
}
|
||||
|
||||
// Audio file metadata tags map to book details (will not overwrite)
|
||||
setMetadataFromAudioFile(overrideExistingDetails = false) {
|
||||
if (!this.audioFiles.length) return false
|
||||
var audioFile = this.audioFiles[0]
|
||||
if (!audioFile.metaTags) return false
|
||||
return this.metadata.setDataFromAudioMetaTags(audioFile.metaTags, overrideExistingDetails)
|
||||
}
|
||||
|
||||
setData(mediaPayload) {
|
||||
this.metadata = new BookMetadata()
|
||||
if (mediaPayload.metadata) {
|
||||
this.metadata.setData(mediaPayload.metadata)
|
||||
}
|
||||
}
|
||||
|
||||
// Look for desc.txt, reader.txt, metadata.abs and opf file then update details if found
|
||||
async syncMetadataFiles(textMetadataFiles, opfMetadataOverrideDetails) {
|
||||
let metadataUpdatePayload = {}
|
||||
let hasUpdated = false
|
||||
|
||||
const descTxt = textMetadataFiles.find(lf => lf.metadata.filename === 'desc.txt')
|
||||
if (descTxt) {
|
||||
const descriptionText = await readTextFile(descTxt.metadata.path)
|
||||
if (descriptionText) {
|
||||
Logger.debug(`[Book] "${this.metadata.title}" found desc.txt updating description with "${descriptionText.slice(0, 20)}..."`)
|
||||
metadataUpdatePayload.description = descriptionText
|
||||
}
|
||||
}
|
||||
const readerTxt = textMetadataFiles.find(lf => lf.metadata.filename === 'reader.txt')
|
||||
if (readerTxt) {
|
||||
const narratorText = await readTextFile(readerTxt.metadata.path)
|
||||
if (narratorText) {
|
||||
Logger.debug(`[Book] "${this.metadata.title}" found reader.txt updating narrator with "${narratorText}"`)
|
||||
metadataUpdatePayload.narrators = this.metadata.parseNarratorsTag(narratorText)
|
||||
}
|
||||
}
|
||||
|
||||
const metadataIsJSON = global.ServerSettings.metadataFileFormat === 'json'
|
||||
const metadataAbs = textMetadataFiles.find(lf => lf.metadata.filename === 'metadata.abs')
|
||||
const metadataJson = textMetadataFiles.find(lf => lf.metadata.filename === 'metadata.json')
|
||||
|
||||
const metadataFile = metadataIsJSON ? metadataJson : metadataAbs
|
||||
if (metadataFile) {
|
||||
Logger.debug(`[Book] Found ${metadataFile.metadata.filename} file for "${this.metadata.title}"`)
|
||||
const metadataText = await readTextFile(metadataFile.metadata.path)
|
||||
const abmetadataUpdates = abmetadataGenerator.parseAndCheckForUpdates(metadataText, this, 'book', metadataIsJSON)
|
||||
if (abmetadataUpdates && Object.keys(abmetadataUpdates).length) {
|
||||
Logger.debug(`[Book] "${this.metadata.title}" changes found in metadata.abs file`, abmetadataUpdates)
|
||||
|
||||
if (abmetadataUpdates.tags) { // Set media tags if updated
|
||||
this.tags = abmetadataUpdates.tags
|
||||
hasUpdated = true
|
||||
}
|
||||
if (abmetadataUpdates.chapters) { // Set chapters if updated
|
||||
this.chapters = abmetadataUpdates.chapters
|
||||
hasUpdated = true
|
||||
}
|
||||
if (abmetadataUpdates.metadata) {
|
||||
metadataUpdatePayload = {
|
||||
...metadataUpdatePayload,
|
||||
...abmetadataUpdates.metadata
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if (metadataAbs || metadataJson) { // Has different metadata file format so mark as updated
|
||||
Logger.debug(`[Book] Found different format metadata file ${(metadataAbs || metadataJson).metadata.filename}, expecting .${global.ServerSettings.metadataFileFormat} for "${this.metadata.title}"`)
|
||||
hasUpdated = true
|
||||
}
|
||||
|
||||
const metadataOpf = textMetadataFiles.find(lf => lf.isOPFFile || lf.metadata.filename === 'metadata.xml')
|
||||
if (metadataOpf) {
|
||||
const xmlText = await readTextFile(metadataOpf.metadata.path)
|
||||
if (xmlText) {
|
||||
const opfMetadata = await parseOpfMetadataXML(xmlText)
|
||||
if (opfMetadata) {
|
||||
for (const key in opfMetadata) {
|
||||
|
||||
if (key === 'tags') { // Add tags only if tags are empty
|
||||
if (opfMetadata.tags.length && (!this.tags.length || opfMetadataOverrideDetails)) {
|
||||
this.tags = opfMetadata.tags
|
||||
hasUpdated = true
|
||||
}
|
||||
} else if (key === 'genres') { // Add genres only if genres are empty
|
||||
if (opfMetadata.genres.length && (!this.metadata.genres.length || opfMetadataOverrideDetails)) {
|
||||
metadataUpdatePayload[key] = opfMetadata.genres
|
||||
}
|
||||
} else if (key === 'authors') {
|
||||
if (opfMetadata.authors && opfMetadata.authors.length && (!this.metadata.authors.length || opfMetadataOverrideDetails)) {
|
||||
metadataUpdatePayload.authors = opfMetadata.authors.map(authorName => {
|
||||
return {
|
||||
id: `new-${Math.floor(Math.random() * 1000000)}`,
|
||||
name: authorName
|
||||
}
|
||||
})
|
||||
}
|
||||
} else if (key === 'narrators') {
|
||||
if (opfMetadata.narrators?.length && (!this.metadata.narrators.length || opfMetadataOverrideDetails)) {
|
||||
metadataUpdatePayload.narrators = opfMetadata.narrators
|
||||
}
|
||||
} else if (key === 'series') {
|
||||
if (opfMetadata.series && (!this.metadata.series.length || opfMetadataOverrideDetails)) {
|
||||
metadataUpdatePayload.series = this.metadata.parseSeriesTag(opfMetadata.series, opfMetadata.sequence)
|
||||
}
|
||||
} else if (opfMetadata[key] && ((!this.metadata[key] && !metadataUpdatePayload[key]) || opfMetadataOverrideDetails)) {
|
||||
metadataUpdatePayload[key] = opfMetadata[key]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (Object.keys(metadataUpdatePayload).length) {
|
||||
return this.metadata.update(metadataUpdatePayload) || hasUpdated
|
||||
}
|
||||
return hasUpdated
|
||||
}
|
||||
|
||||
searchQuery(query) {
|
||||
const payload = {
|
||||
tags: this.tags.filter(t => cleanStringForSearch(t).includes(query)),
|
||||
series: this.metadata.searchSeries(query),
|
||||
authors: this.metadata.searchAuthors(query),
|
||||
narrators: this.metadata.searchNarrators(query),
|
||||
matchKey: null,
|
||||
matchText: null
|
||||
}
|
||||
const metadataMatch = this.metadata.searchQuery(query)
|
||||
if (metadataMatch) {
|
||||
payload.matchKey = metadataMatch.matchKey
|
||||
payload.matchText = metadataMatch.matchText
|
||||
} else {
|
||||
if (payload.authors.length) {
|
||||
payload.matchKey = 'authors'
|
||||
payload.matchText = this.metadata.authorName
|
||||
} else if (payload.series.length) {
|
||||
payload.matchKey = 'series'
|
||||
payload.matchText = this.metadata.seriesName
|
||||
} else if (payload.tags.length) {
|
||||
payload.matchKey = 'tags'
|
||||
payload.matchText = this.tags.join(', ')
|
||||
} else if (payload.narrators.length) {
|
||||
payload.matchKey = 'narrators'
|
||||
payload.matchText = this.metadata.narratorName
|
||||
}
|
||||
}
|
||||
return payload
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the EBookFile from a LibraryFile
|
||||
* If null then ebookFile will be removed from the book
|
||||
|
|
@ -426,113 +258,6 @@ class Book {
|
|||
Logger.debug(`[Book] Tracks being rebuilt...!`)
|
||||
this.audioFiles.sort((a, b) => a.index - b.index)
|
||||
this.missingParts = []
|
||||
this.setChapters()
|
||||
this.checkUpdateMissingTracks()
|
||||
}
|
||||
|
||||
checkUpdateMissingTracks() {
|
||||
var currMissingParts = (this.missingParts || []).join(',') || ''
|
||||
|
||||
var current_index = 1
|
||||
var missingParts = []
|
||||
|
||||
for (let i = 0; i < this.tracks.length; i++) {
|
||||
var _track = this.tracks[i]
|
||||
if (_track.index > current_index) {
|
||||
var num_parts_missing = _track.index - current_index
|
||||
for (let x = 0; x < num_parts_missing && x < 9999; x++) {
|
||||
missingParts.push(current_index + x)
|
||||
}
|
||||
}
|
||||
current_index = _track.index + 1
|
||||
}
|
||||
|
||||
this.missingParts = missingParts
|
||||
|
||||
var newMissingParts = (this.missingParts || []).join(',') || ''
|
||||
var wasUpdated = newMissingParts !== currMissingParts
|
||||
if (wasUpdated && this.missingParts.length) {
|
||||
Logger.info(`[Audiobook] "${this.metadata.title}" has ${missingParts.length} missing parts`)
|
||||
}
|
||||
|
||||
return wasUpdated
|
||||
}
|
||||
|
||||
setChapters() {
|
||||
const preferOverdriveMediaMarker = !!global.ServerSettings.scannerPreferOverdriveMediaMarker
|
||||
|
||||
// If 1 audio file without chapters, then no chapters will be set
|
||||
const includedAudioFiles = this.audioFiles.filter(af => !af.exclude)
|
||||
if (!includedAudioFiles.length) return
|
||||
|
||||
// If overdrive media markers are present and preferred, use those instead
|
||||
if (preferOverdriveMediaMarker) {
|
||||
const overdriveChapters = parseOverdriveMediaMarkersAsChapters(includedAudioFiles)
|
||||
if (overdriveChapters) {
|
||||
Logger.info('[Book] Overdrive Media Markers and preference found! Using these for chapter definitions')
|
||||
this.chapters = overdriveChapters
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
// If first audio file has embedded chapters then use embedded chapters
|
||||
if (includedAudioFiles[0].chapters?.length) {
|
||||
// If all files chapters are the same, then only make chapters for the first file
|
||||
if (
|
||||
includedAudioFiles.length === 1 ||
|
||||
includedAudioFiles.length > 1 &&
|
||||
includedAudioFiles[0].chapters.length === includedAudioFiles[1].chapters?.length &&
|
||||
includedAudioFiles[0].chapters.every((c, i) => c.title === includedAudioFiles[1].chapters[i].title)
|
||||
) {
|
||||
Logger.debug(`[Book] setChapters: Using embedded chapters in first audio file ${includedAudioFiles[0].metadata?.path}`)
|
||||
this.chapters = includedAudioFiles[0].chapters.map((c) => ({ ...c }))
|
||||
} else {
|
||||
Logger.debug(`[Book] setChapters: Using embedded chapters from all audio files ${includedAudioFiles[0].metadata?.path}`)
|
||||
this.chapters = []
|
||||
let currChapterId = 0
|
||||
let currStartTime = 0
|
||||
|
||||
includedAudioFiles.forEach((file) => {
|
||||
if (file.duration) {
|
||||
const chapters = file.chapters?.map((c) => ({
|
||||
...c,
|
||||
id: c.id + currChapterId,
|
||||
start: c.start + currStartTime,
|
||||
end: c.end + currStartTime,
|
||||
})) ?? []
|
||||
this.chapters = this.chapters.concat(chapters)
|
||||
|
||||
currChapterId += file.chapters?.length ?? 0
|
||||
currStartTime += file.duration
|
||||
}
|
||||
})
|
||||
}
|
||||
} else if (includedAudioFiles.length > 1) {
|
||||
const preferAudioMetadata = !!global.ServerSettings.scannerPreferAudioMetadata
|
||||
|
||||
// Build chapters from audio files
|
||||
this.chapters = []
|
||||
let currChapterId = 0
|
||||
let currStartTime = 0
|
||||
includedAudioFiles.forEach((file) => {
|
||||
if (file.duration) {
|
||||
let title = file.metadata.filename ? Path.basename(file.metadata.filename, Path.extname(file.metadata.filename)) : `Chapter ${currChapterId}`
|
||||
|
||||
// When prefer audio metadata server setting is set then use ID3 title tag as long as it is not the same as the book title
|
||||
if (preferAudioMetadata && file.metaTags?.tagTitle && file.metaTags?.tagTitle !== this.metadata.title) {
|
||||
title = file.metaTags.tagTitle
|
||||
}
|
||||
|
||||
this.chapters.push({
|
||||
id: currChapterId++,
|
||||
start: currStartTime,
|
||||
end: currStartTime + file.duration,
|
||||
title
|
||||
})
|
||||
currStartTime += file.duration
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// Only checks container format
|
||||
|
|
|
|||
|
|
@ -65,15 +65,6 @@ class Music {
|
|||
get hasMediaEntities() {
|
||||
return !!this.audioFile
|
||||
}
|
||||
get shouldSearchForCover() {
|
||||
return false
|
||||
}
|
||||
get hasEmbeddedCoverArt() {
|
||||
return this.audioFile.embeddedCoverArt
|
||||
}
|
||||
get hasIssues() {
|
||||
return false
|
||||
}
|
||||
get duration() {
|
||||
return this.audioFile.duration || 0
|
||||
}
|
||||
|
|
@ -134,20 +125,6 @@ class Music {
|
|||
this.audioFile = audioFile
|
||||
}
|
||||
|
||||
setMetadataFromAudioFile(overrideExistingDetails = false) {
|
||||
if (!this.audioFile) return false
|
||||
if (!this.audioFile.metaTags) return false
|
||||
return this.metadata.setDataFromAudioMetaTags(this.audioFile.metaTags, overrideExistingDetails)
|
||||
}
|
||||
|
||||
syncMetadataFiles(textMetadataFiles, opfMetadataOverrideDetails) {
|
||||
return false
|
||||
}
|
||||
|
||||
searchQuery(query) {
|
||||
return {}
|
||||
}
|
||||
|
||||
// Only checks container format
|
||||
checkCanDirectPlay(payload) {
|
||||
return true
|
||||
|
|
|
|||
|
|
@ -1,9 +1,8 @@
|
|||
const Logger = require('../../Logger')
|
||||
const PodcastEpisode = require('../entities/PodcastEpisode')
|
||||
const PodcastMetadata = require('../metadata/PodcastMetadata')
|
||||
const { areEquivalent, copyValue, cleanStringForSearch } = require('../../utils/index')
|
||||
const abmetadataGenerator = require('../../utils/generators/abmetadataGenerator')
|
||||
const { readTextFile, filePathToPOSIX } = require('../../utils/fileUtils')
|
||||
const { areEquivalent, copyValue } = require('../../utils/index')
|
||||
const { filePathToPOSIX } = require('../../utils/fileUtils')
|
||||
|
||||
class Podcast {
|
||||
constructor(podcast) {
|
||||
|
|
@ -98,7 +97,19 @@ class Podcast {
|
|||
toJSONForMetadataFile() {
|
||||
return {
|
||||
tags: [...this.tags],
|
||||
metadata: this.metadata.toJSON()
|
||||
title: this.metadata.title,
|
||||
author: this.metadata.author,
|
||||
description: this.metadata.description,
|
||||
releaseDate: this.metadata.releaseDate,
|
||||
genres: [...this.metadata.genres],
|
||||
feedURL: this.metadata.feedUrl,
|
||||
imageURL: this.metadata.imageUrl,
|
||||
itunesPageURL: this.metadata.itunesPageUrl,
|
||||
itunesId: this.metadata.itunesId,
|
||||
itunesArtistId: this.metadata.itunesArtistId,
|
||||
explicit: this.metadata.explicit,
|
||||
language: this.metadata.language,
|
||||
podcastType: this.metadata.type
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -110,15 +121,6 @@ class Podcast {
|
|||
get hasMediaEntities() {
|
||||
return !!this.episodes.length
|
||||
}
|
||||
get shouldSearchForCover() {
|
||||
return false
|
||||
}
|
||||
get hasEmbeddedCoverArt() {
|
||||
return this.episodes.some(ep => ep.audioFile.embeddedCoverArt)
|
||||
}
|
||||
get hasIssues() {
|
||||
return false
|
||||
}
|
||||
get duration() {
|
||||
let total = 0
|
||||
this.episodes.forEach((ep) => total += ep.duration)
|
||||
|
|
@ -187,10 +189,6 @@ class Podcast {
|
|||
return null
|
||||
}
|
||||
|
||||
findEpisodeWithInode(inode) {
|
||||
return this.episodes.find(ep => ep.audioFile.ino === inode)
|
||||
}
|
||||
|
||||
setData(mediaData) {
|
||||
this.metadata = new PodcastMetadata()
|
||||
if (mediaData.metadata) {
|
||||
|
|
@ -203,62 +201,6 @@ class Podcast {
|
|||
this.lastEpisodeCheck = Date.now() // Makes sure new episodes are after this
|
||||
}
|
||||
|
||||
async syncMetadataFiles(textMetadataFiles, opfMetadataOverrideDetails) {
|
||||
let metadataUpdatePayload = {}
|
||||
let tagsUpdated = false
|
||||
|
||||
const metadataAbs = textMetadataFiles.find(lf => lf.metadata.filename === 'metadata.abs' || lf.metadata.filename === 'metadata.json')
|
||||
if (metadataAbs) {
|
||||
const isJSON = metadataAbs.metadata.filename === 'metadata.json'
|
||||
const metadataText = await readTextFile(metadataAbs.metadata.path)
|
||||
const abmetadataUpdates = abmetadataGenerator.parseAndCheckForUpdates(metadataText, this, 'podcast', isJSON)
|
||||
if (abmetadataUpdates && Object.keys(abmetadataUpdates).length) {
|
||||
Logger.debug(`[Podcast] "${this.metadata.title}" changes found in metadata.abs file`, abmetadataUpdates)
|
||||
|
||||
if (abmetadataUpdates.tags) { // Set media tags if updated
|
||||
this.tags = abmetadataUpdates.tags
|
||||
tagsUpdated = true
|
||||
}
|
||||
if (abmetadataUpdates.metadata) {
|
||||
metadataUpdatePayload = {
|
||||
...metadataUpdatePayload,
|
||||
...abmetadataUpdates.metadata
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (Object.keys(metadataUpdatePayload).length) {
|
||||
return this.metadata.update(metadataUpdatePayload) || tagsUpdated
|
||||
}
|
||||
return tagsUpdated
|
||||
}
|
||||
|
||||
searchEpisodes(query) {
|
||||
return this.episodes.filter(ep => ep.searchQuery(query))
|
||||
}
|
||||
|
||||
searchQuery(query) {
|
||||
const payload = {
|
||||
tags: this.tags.filter(t => cleanStringForSearch(t).includes(query)),
|
||||
matchKey: null,
|
||||
matchText: null
|
||||
}
|
||||
const metadataMatch = this.metadata.searchQuery(query)
|
||||
if (metadataMatch) {
|
||||
payload.matchKey = metadataMatch.matchKey
|
||||
payload.matchText = metadataMatch.matchText
|
||||
} else {
|
||||
const matchingEpisodes = this.searchEpisodes(query)
|
||||
if (matchingEpisodes.length) {
|
||||
payload.matchKey = 'episode'
|
||||
payload.matchText = matchingEpisodes[0].title
|
||||
}
|
||||
}
|
||||
|
||||
return payload
|
||||
}
|
||||
|
||||
checkHasEpisode(episodeId) {
|
||||
return this.episodes.some(ep => ep.id === episodeId)
|
||||
}
|
||||
|
|
@ -325,14 +267,6 @@ class Podcast {
|
|||
return this.episodes.find(ep => ep.id == episodeId)
|
||||
}
|
||||
|
||||
// Audio file metadata tags map to podcast details
|
||||
setMetadataFromAudioFile(overrideExistingDetails = false) {
|
||||
if (!this.episodes.length) return false
|
||||
const audioFile = this.episodes[0].audioFile
|
||||
if (!audioFile?.metaTags) return false
|
||||
return this.metadata.setDataFromAudioMetaTags(audioFile.metaTags, overrideExistingDetails)
|
||||
}
|
||||
|
||||
getChapters(episodeId) {
|
||||
return this.getEpisode(episodeId)?.chapters?.map(ch => ({ ...ch })) || []
|
||||
}
|
||||
|
|
|
|||
|
|
@ -69,15 +69,6 @@ class Video {
|
|||
get hasMediaEntities() {
|
||||
return true
|
||||
}
|
||||
get shouldSearchForCover() {
|
||||
return false
|
||||
}
|
||||
get hasEmbeddedCoverArt() {
|
||||
return false
|
||||
}
|
||||
get hasIssues() {
|
||||
return false
|
||||
}
|
||||
get duration() {
|
||||
return 0
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
const Logger = require('../../Logger')
|
||||
const { areEquivalent, copyValue, cleanStringForSearch, getTitleIgnorePrefix, getTitlePrefixAtEnd } = require('../../utils/index')
|
||||
const { areEquivalent, copyValue, getTitleIgnorePrefix, getTitlePrefixAtEnd } = require('../../utils/index')
|
||||
const parseNameString = require('../../utils/parsers/parseNameString')
|
||||
class BookMetadata {
|
||||
constructor(metadata) {
|
||||
|
|
@ -144,20 +144,6 @@ class BookMetadata {
|
|||
return `${se.name} #${se.sequence}`
|
||||
}).join(', ')
|
||||
}
|
||||
get seriesNameIgnorePrefix() {
|
||||
if (!this.series.length) return ''
|
||||
return this.series.map(se => {
|
||||
if (!se.sequence) return getTitleIgnorePrefix(se.name)
|
||||
return `${getTitleIgnorePrefix(se.name)} #${se.sequence}`
|
||||
}).join(', ')
|
||||
}
|
||||
get seriesNamePrefixAtEnd() {
|
||||
if (!this.series.length) return ''
|
||||
return this.series.map(se => {
|
||||
if (!se.sequence) return getTitlePrefixAtEnd(se.name)
|
||||
return `${getTitlePrefixAtEnd(se.name)} #${se.sequence}`
|
||||
}).join(', ')
|
||||
}
|
||||
get firstSeriesName() {
|
||||
if (!this.series.length) return ''
|
||||
return this.series[0].name
|
||||
|
|
@ -169,36 +155,15 @@ class BookMetadata {
|
|||
get narratorName() {
|
||||
return this.narrators.join(', ')
|
||||
}
|
||||
get coverSearchQuery() {
|
||||
if (!this.authorName) return this.title
|
||||
return this.title + '&' + this.authorName
|
||||
}
|
||||
|
||||
hasAuthor(id) {
|
||||
return !!this.authors.find(au => au.id == id)
|
||||
}
|
||||
hasSeries(seriesId) {
|
||||
return !!this.series.find(se => se.id == seriesId)
|
||||
}
|
||||
hasNarrator(narratorName) {
|
||||
return this.narrators.includes(narratorName)
|
||||
}
|
||||
getSeries(seriesId) {
|
||||
return this.series.find(se => se.id == seriesId)
|
||||
}
|
||||
getFirstSeries() {
|
||||
return this.series.length ? this.series[0] : null
|
||||
}
|
||||
getSeriesSequence(seriesId) {
|
||||
const series = this.series.find(se => se.id == seriesId)
|
||||
if (!series) return null
|
||||
return series.sequence || ''
|
||||
}
|
||||
getSeriesSortTitle(series) {
|
||||
if (!series) return ''
|
||||
if (!series.sequence) return series.name
|
||||
return `${series.name} #${series.sequence}`
|
||||
}
|
||||
|
||||
update(payload) {
|
||||
const json = this.toJSON()
|
||||
|
|
@ -231,205 +196,5 @@ class BookMetadata {
|
|||
name: newAuthor.name
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Update narrator name if narrator is in book
|
||||
* @param {String} oldNarratorName - Narrator name to get updated
|
||||
* @param {String} newNarratorName - Updated narrator name
|
||||
* @return {Boolean} True if narrator was updated
|
||||
*/
|
||||
updateNarrator(oldNarratorName, newNarratorName) {
|
||||
if (!this.hasNarrator(oldNarratorName)) return false
|
||||
this.narrators = this.narrators.filter(n => n !== oldNarratorName)
|
||||
if (newNarratorName && !this.hasNarrator(newNarratorName)) {
|
||||
this.narrators.push(newNarratorName)
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove narrator name if narrator is in book
|
||||
* @param {String} narratorName - Narrator name to remove
|
||||
* @return {Boolean} True if narrator was updated
|
||||
*/
|
||||
removeNarrator(narratorName) {
|
||||
if (!this.hasNarrator(narratorName)) return false
|
||||
this.narrators = this.narrators.filter(n => n !== narratorName)
|
||||
return true
|
||||
}
|
||||
|
||||
setData(scanMediaData = {}) {
|
||||
this.title = scanMediaData.title || null
|
||||
this.subtitle = scanMediaData.subtitle || null
|
||||
this.narrators = this.parseNarratorsTag(scanMediaData.narrators)
|
||||
this.publishedYear = scanMediaData.publishedYear || null
|
||||
this.description = scanMediaData.description || null
|
||||
this.isbn = scanMediaData.isbn || null
|
||||
this.asin = scanMediaData.asin || null
|
||||
this.language = scanMediaData.language || null
|
||||
this.genres = []
|
||||
this.explicit = !!scanMediaData.explicit
|
||||
|
||||
if (scanMediaData.author) {
|
||||
this.authors = this.parseAuthorsTag(scanMediaData.author)
|
||||
}
|
||||
if (scanMediaData.series) {
|
||||
this.series = this.parseSeriesTag(scanMediaData.series, scanMediaData.sequence)
|
||||
}
|
||||
}
|
||||
|
||||
setDataFromAudioMetaTags(audioFileMetaTags, overrideExistingDetails = false) {
|
||||
const MetadataMapArray = [
|
||||
{
|
||||
tag: 'tagComposer',
|
||||
key: 'narrators'
|
||||
},
|
||||
{
|
||||
tag: 'tagDescription',
|
||||
altTag: 'tagComment',
|
||||
key: 'description'
|
||||
},
|
||||
{
|
||||
tag: 'tagPublisher',
|
||||
key: 'publisher'
|
||||
},
|
||||
{
|
||||
tag: 'tagDate',
|
||||
key: 'publishedYear'
|
||||
},
|
||||
{
|
||||
tag: 'tagSubtitle',
|
||||
key: 'subtitle'
|
||||
},
|
||||
{
|
||||
tag: 'tagAlbum',
|
||||
altTag: 'tagTitle',
|
||||
key: 'title',
|
||||
},
|
||||
{
|
||||
tag: 'tagArtist',
|
||||
altTag: 'tagAlbumArtist',
|
||||
key: 'authors'
|
||||
},
|
||||
{
|
||||
tag: 'tagGenre',
|
||||
key: 'genres'
|
||||
},
|
||||
{
|
||||
tag: 'tagSeries',
|
||||
key: 'series'
|
||||
},
|
||||
{
|
||||
tag: 'tagIsbn',
|
||||
key: 'isbn'
|
||||
},
|
||||
{
|
||||
tag: 'tagLanguage',
|
||||
key: 'language'
|
||||
},
|
||||
{
|
||||
tag: 'tagASIN',
|
||||
key: 'asin'
|
||||
}
|
||||
]
|
||||
|
||||
const updatePayload = {}
|
||||
|
||||
// Metadata is only mapped to the book if it is empty
|
||||
MetadataMapArray.forEach((mapping) => {
|
||||
let value = audioFileMetaTags[mapping.tag]
|
||||
// let tagToUse = mapping.tag
|
||||
if (!value && mapping.altTag) {
|
||||
value = audioFileMetaTags[mapping.altTag]
|
||||
// tagToUse = mapping.altTag
|
||||
}
|
||||
|
||||
if (value && typeof value === 'string') {
|
||||
value = value.trim() // Trim whitespace
|
||||
|
||||
if (mapping.key === 'narrators' && (!this.narrators.length || overrideExistingDetails)) {
|
||||
updatePayload.narrators = this.parseNarratorsTag(value)
|
||||
} else if (mapping.key === 'authors' && (!this.authors.length || overrideExistingDetails)) {
|
||||
updatePayload.authors = this.parseAuthorsTag(value)
|
||||
} else if (mapping.key === 'genres' && (!this.genres.length || overrideExistingDetails)) {
|
||||
updatePayload.genres = this.parseGenresTag(value)
|
||||
} else if (mapping.key === 'series' && (!this.series.length || overrideExistingDetails)) {
|
||||
const sequenceTag = audioFileMetaTags.tagSeriesPart || null
|
||||
updatePayload.series = this.parseSeriesTag(value, sequenceTag)
|
||||
} else if (!this[mapping.key] || overrideExistingDetails) {
|
||||
updatePayload[mapping.key] = value
|
||||
// Logger.debug(`[Book] Mapping metadata to key ${tagToUse} => ${mapping.key}: ${updatePayload[mapping.key]}`)
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
if (Object.keys(updatePayload).length) {
|
||||
return this.update(updatePayload)
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// Returns array of names in First Last format
|
||||
parseNarratorsTag(narratorsTag) {
|
||||
const parsed = parseNameString.parse(narratorsTag)
|
||||
return parsed ? parsed.names : []
|
||||
}
|
||||
|
||||
// Return array of authors minified with placeholder id
|
||||
parseAuthorsTag(authorsTag) {
|
||||
const parsed = parseNameString.parse(authorsTag)
|
||||
if (!parsed) return []
|
||||
return (parsed.names || []).map((au) => {
|
||||
const findAuthor = this.authors.find(_au => _au.name == au)
|
||||
|
||||
return {
|
||||
id: findAuthor?.id || `new-${Math.floor(Math.random() * 1000000)}`,
|
||||
name: au
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
parseGenresTag(genreTag) {
|
||||
if (!genreTag || !genreTag.length) return []
|
||||
const separators = ['/', '//', ';']
|
||||
for (let i = 0; i < separators.length; i++) {
|
||||
if (genreTag.includes(separators[i])) {
|
||||
return genreTag.split(separators[i]).map(genre => genre.trim()).filter(g => !!g)
|
||||
}
|
||||
}
|
||||
return [genreTag]
|
||||
}
|
||||
|
||||
// Return array with series with placeholder id
|
||||
parseSeriesTag(seriesTag, sequenceTag) {
|
||||
if (!seriesTag) return []
|
||||
return [{
|
||||
id: `new-${Math.floor(Math.random() * 1000000)}`,
|
||||
name: seriesTag,
|
||||
sequence: sequenceTag || ''
|
||||
}]
|
||||
}
|
||||
|
||||
searchSeries(query) {
|
||||
return this.series.filter(se => cleanStringForSearch(se.name).includes(query))
|
||||
}
|
||||
searchAuthors(query) {
|
||||
return this.authors.filter(au => cleanStringForSearch(au.name).includes(query))
|
||||
}
|
||||
searchNarrators(query) {
|
||||
return this.narrators.filter(n => cleanStringForSearch(n).includes(query))
|
||||
}
|
||||
searchQuery(query) { // Returns key if match is found
|
||||
const keysToCheck = ['title', 'asin', 'isbn', 'subtitle']
|
||||
for (const key of keysToCheck) {
|
||||
if (this[key] && cleanStringForSearch(String(this[key])).includes(query)) {
|
||||
return {
|
||||
matchKey: key,
|
||||
matchText: this[key]
|
||||
}
|
||||
}
|
||||
}
|
||||
return null
|
||||
}
|
||||
}
|
||||
module.exports = BookMetadata
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
const Logger = require('../../Logger')
|
||||
const { areEquivalent, copyValue, cleanStringForSearch, getTitleIgnorePrefix, getTitlePrefixAtEnd } = require('../../utils/index')
|
||||
const { areEquivalent, copyValue, getTitleIgnorePrefix, getTitlePrefixAtEnd } = require('../../utils/index')
|
||||
|
||||
class MusicMetadata {
|
||||
constructor(metadata) {
|
||||
|
|
@ -133,19 +133,6 @@ class MusicMetadata {
|
|||
return getTitlePrefixAtEnd(this.title)
|
||||
}
|
||||
|
||||
searchQuery(query) { // Returns key if match is found
|
||||
const keysToCheck = ['title', 'album']
|
||||
for (const key of keysToCheck) {
|
||||
if (this[key] && cleanStringForSearch(String(this[key])).includes(query)) {
|
||||
return {
|
||||
matchKey: key,
|
||||
matchText: this[key]
|
||||
}
|
||||
}
|
||||
}
|
||||
return null
|
||||
}
|
||||
|
||||
setData(mediaMetadata = {}) {
|
||||
this.title = mediaMetadata.title || null
|
||||
this.artist = mediaMetadata.artist || null
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
const Logger = require('../../Logger')
|
||||
const { areEquivalent, copyValue, cleanStringForSearch, getTitleIgnorePrefix, getTitlePrefixAtEnd } = require('../../utils/index')
|
||||
const { areEquivalent, copyValue, getTitleIgnorePrefix, getTitlePrefixAtEnd } = require('../../utils/index')
|
||||
|
||||
class PodcastMetadata {
|
||||
constructor(metadata) {
|
||||
|
|
@ -91,19 +91,6 @@ class PodcastMetadata {
|
|||
return getTitlePrefixAtEnd(this.title)
|
||||
}
|
||||
|
||||
searchQuery(query) { // Returns key if match is found
|
||||
const keysToCheck = ['title', 'author', 'itunesId', 'itunesArtistId']
|
||||
for (const key of keysToCheck) {
|
||||
if (this[key] && cleanStringForSearch(String(this[key])).includes(query)) {
|
||||
return {
|
||||
matchKey: key,
|
||||
matchText: this[key]
|
||||
}
|
||||
}
|
||||
}
|
||||
return null
|
||||
}
|
||||
|
||||
setData(mediaMetadata = {}) {
|
||||
this.title = mediaMetadata.title || null
|
||||
this.author = mediaMetadata.author || null
|
||||
|
|
@ -136,74 +123,5 @@ class PodcastMetadata {
|
|||
}
|
||||
return hasUpdates
|
||||
}
|
||||
|
||||
setDataFromAudioMetaTags(audioFileMetaTags, overrideExistingDetails = false) {
|
||||
const MetadataMapArray = [
|
||||
{
|
||||
tag: 'tagAlbum',
|
||||
altTag: 'tagSeries',
|
||||
key: 'title'
|
||||
},
|
||||
{
|
||||
tag: 'tagArtist',
|
||||
key: 'author'
|
||||
},
|
||||
{
|
||||
tag: 'tagGenre',
|
||||
key: 'genres'
|
||||
},
|
||||
{
|
||||
tag: 'tagLanguage',
|
||||
key: 'language'
|
||||
},
|
||||
{
|
||||
tag: 'tagItunesId',
|
||||
key: 'itunesId'
|
||||
},
|
||||
{
|
||||
tag: 'tagPodcastType',
|
||||
key: 'type',
|
||||
}
|
||||
]
|
||||
|
||||
const updatePayload = {}
|
||||
|
||||
MetadataMapArray.forEach((mapping) => {
|
||||
let value = audioFileMetaTags[mapping.tag]
|
||||
let tagToUse = mapping.tag
|
||||
if (!value && mapping.altTag) {
|
||||
value = audioFileMetaTags[mapping.altTag]
|
||||
tagToUse = mapping.altTag
|
||||
}
|
||||
|
||||
if (value && typeof value === 'string') {
|
||||
value = value.trim() // Trim whitespace
|
||||
|
||||
if (mapping.key === 'genres' && (!this.genres.length || overrideExistingDetails)) {
|
||||
updatePayload.genres = this.parseGenresTag(value)
|
||||
Logger.debug(`[Podcast] Mapping metadata to key ${tagToUse} => ${mapping.key}: ${updatePayload.genres.join(', ')}`)
|
||||
} else if (!this[mapping.key] || overrideExistingDetails) {
|
||||
updatePayload[mapping.key] = value
|
||||
Logger.debug(`[Podcast] Mapping metadata to key ${tagToUse} => ${mapping.key}: ${updatePayload[mapping.key]}`)
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
if (Object.keys(updatePayload).length) {
|
||||
return this.update(updatePayload)
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
parseGenresTag(genreTag) {
|
||||
if (!genreTag || !genreTag.length) return []
|
||||
const separators = ['/', '//', ';']
|
||||
for (let i = 0; i < separators.length; i++) {
|
||||
if (genreTag.includes(separators[i])) {
|
||||
return genreTag.split(separators[i]).map(genre => genre.trim()).filter(g => !!g)
|
||||
}
|
||||
}
|
||||
return [genreTag]
|
||||
}
|
||||
}
|
||||
module.exports = PodcastMetadata
|
||||
|
|
|
|||
|
|
@ -55,19 +55,6 @@ class VideoMetadata {
|
|||
return getTitlePrefixAtEnd(this.title)
|
||||
}
|
||||
|
||||
searchQuery(query) { // Returns key if match is found
|
||||
var keysToCheck = ['title']
|
||||
for (var key of keysToCheck) {
|
||||
if (this[key] && String(this[key]).toLowerCase().includes(query)) {
|
||||
return {
|
||||
matchKey: key,
|
||||
matchText: this[key]
|
||||
}
|
||||
}
|
||||
}
|
||||
return null
|
||||
}
|
||||
|
||||
setData(mediaMetadata = {}) {
|
||||
this.title = mediaMetadata.title || null
|
||||
this.description = mediaMetadata.description || null
|
||||
|
|
|
|||
|
|
@ -1,6 +1,14 @@
|
|||
const Logger = require('../../Logger')
|
||||
const { areEquivalent, copyValue, isNullOrNaN } = require('../../utils')
|
||||
|
||||
/**
|
||||
* @typedef EreaderDeviceObject
|
||||
* @property {string} name
|
||||
* @property {string} email
|
||||
* @property {string} availabilityOption
|
||||
* @property {string[]} users
|
||||
*/
|
||||
|
||||
// REF: https://nodemailer.com/smtp/
|
||||
class EmailSettings {
|
||||
constructor(settings = null) {
|
||||
|
|
@ -13,7 +21,7 @@ class EmailSettings {
|
|||
this.testAddress = null
|
||||
this.fromAddress = null
|
||||
|
||||
// Array of { name:String, email:String }
|
||||
/** @type {EreaderDeviceObject[]} */
|
||||
this.ereaderDevices = []
|
||||
|
||||
if (settings) {
|
||||
|
|
@ -57,6 +65,26 @@ class EmailSettings {
|
|||
|
||||
if (payload.ereaderDevices !== undefined && !Array.isArray(payload.ereaderDevices)) payload.ereaderDevices = undefined
|
||||
|
||||
if (payload.ereaderDevices?.length) {
|
||||
// Validate ereader devices
|
||||
payload.ereaderDevices = payload.ereaderDevices.map((device) => {
|
||||
if (!device.name || !device.email) {
|
||||
Logger.error(`[EmailSettings] Update ereader device is invalid`, device)
|
||||
return null
|
||||
}
|
||||
if (!device.availabilityOption || !['adminOrUp', 'userOrUp', 'guestOrUp', 'specificUsers'].includes(device.availabilityOption)) {
|
||||
device.availabilityOption = 'adminOrUp'
|
||||
}
|
||||
if (device.availabilityOption === 'specificUsers' && !device.users?.length) {
|
||||
device.availabilityOption = 'adminOrUp'
|
||||
}
|
||||
if (device.availabilityOption !== 'specificUsers' && device.users?.length) {
|
||||
device.users = []
|
||||
}
|
||||
return device
|
||||
}).filter(d => d)
|
||||
}
|
||||
|
||||
let hasUpdates = false
|
||||
|
||||
const json = this.toJSON()
|
||||
|
|
@ -88,15 +116,40 @@ class EmailSettings {
|
|||
return payload
|
||||
}
|
||||
|
||||
getEReaderDevices(user) {
|
||||
// Only accessible to admin or up
|
||||
if (!user.isAdminOrUp) {
|
||||
return []
|
||||
/**
|
||||
*
|
||||
* @param {EreaderDeviceObject} device
|
||||
* @param {import('../user/User')} user
|
||||
* @returns {boolean}
|
||||
*/
|
||||
checkUserCanAccessDevice(device, user) {
|
||||
let deviceAvailability = device.availabilityOption || 'adminOrUp'
|
||||
if (deviceAvailability === 'adminOrUp' && user.isAdminOrUp) return true
|
||||
if (deviceAvailability === 'userOrUp' && (user.isAdminOrUp || user.isUser)) return true
|
||||
if (deviceAvailability === 'guestOrUp') return true
|
||||
if (deviceAvailability === 'specificUsers') {
|
||||
let deviceUsers = device.users || []
|
||||
return deviceUsers.includes(user.id)
|
||||
}
|
||||
|
||||
return this.ereaderDevices.map(d => ({ ...d }))
|
||||
return false
|
||||
}
|
||||
|
||||
/**
|
||||
* Get ereader devices accessible to user
|
||||
*
|
||||
* @param {import('../user/User')} user
|
||||
* @returns {EreaderDeviceObject[]}
|
||||
*/
|
||||
getEReaderDevices(user) {
|
||||
return this.ereaderDevices.filter((device) => this.checkUserCanAccessDevice(device, user))
|
||||
}
|
||||
|
||||
/**
|
||||
* Get ereader device by name
|
||||
*
|
||||
* @param {string} deviceName
|
||||
* @returns {EreaderDeviceObject}
|
||||
*/
|
||||
getEReaderDevice(deviceName) {
|
||||
return this.ereaderDevices.find(d => d.name === deviceName)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -9,6 +9,7 @@ class LibrarySettings {
|
|||
this.autoScanCronExpression = null
|
||||
this.audiobooksOnly = false
|
||||
this.hideSingleBookSeries = false // Do not show series that only have 1 book
|
||||
this.metadataPrecedence = ['folderStructure', 'audioMetatags', 'txtFiles', 'opfFile', 'absMetadata']
|
||||
|
||||
if (settings) {
|
||||
this.construct(settings)
|
||||
|
|
@ -23,6 +24,12 @@ class LibrarySettings {
|
|||
this.autoScanCronExpression = settings.autoScanCronExpression || null
|
||||
this.audiobooksOnly = !!settings.audiobooksOnly
|
||||
this.hideSingleBookSeries = !!settings.hideSingleBookSeries
|
||||
if (settings.metadataPrecedence) {
|
||||
this.metadataPrecedence = [...settings.metadataPrecedence]
|
||||
} else {
|
||||
// Added in v2.4.5
|
||||
this.metadataPrecedence = ['folderStructure', 'audioMetatags', 'txtFiles', 'opfFile', 'absMetadata']
|
||||
}
|
||||
}
|
||||
|
||||
toJSON() {
|
||||
|
|
@ -33,14 +40,20 @@ class LibrarySettings {
|
|||
skipMatchingMediaWithIsbn: this.skipMatchingMediaWithIsbn,
|
||||
autoScanCronExpression: this.autoScanCronExpression,
|
||||
audiobooksOnly: this.audiobooksOnly,
|
||||
hideSingleBookSeries: this.hideSingleBookSeries
|
||||
hideSingleBookSeries: this.hideSingleBookSeries,
|
||||
metadataPrecedence: [...this.metadataPrecedence]
|
||||
}
|
||||
}
|
||||
|
||||
update(payload) {
|
||||
let hasUpdates = false
|
||||
for (const key in payload) {
|
||||
if (this[key] !== payload[key]) {
|
||||
if (key === 'metadataPrecedence') {
|
||||
if (payload[key] && Array.isArray(payload[key]) && payload[key].join() !== this[key].join()) {
|
||||
this[key] = payload[key]
|
||||
hasUpdates = true
|
||||
}
|
||||
} else if (this[key] !== payload[key]) {
|
||||
this[key] = payload[key]
|
||||
hasUpdates = true
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,3 +1,4 @@
|
|||
const packageJson = require('../../../package.json')
|
||||
const { BookshelfView } = require('../../utils/constants')
|
||||
const Logger = require('../../Logger')
|
||||
|
||||
|
|
@ -10,11 +11,8 @@ class ServerSettings {
|
|||
this.scannerParseSubtitle = false
|
||||
this.scannerFindCovers = false
|
||||
this.scannerCoverProvider = 'google'
|
||||
this.scannerPreferAudioMetadata = false
|
||||
this.scannerPreferOpfMetadata = false
|
||||
this.scannerPreferMatchedMetadata = false
|
||||
this.scannerDisableWatcher = false
|
||||
this.scannerPreferOverdriveMediaMarker = false
|
||||
|
||||
// Metadata - choose to store inside users library item folder
|
||||
this.storeCoverWithItem = false
|
||||
|
|
@ -53,7 +51,8 @@ class ServerSettings {
|
|||
|
||||
this.logLevel = Logger.logLevel
|
||||
|
||||
this.version = null
|
||||
this.version = packageJson.version
|
||||
this.buildNumber = packageJson.buildNumber
|
||||
|
||||
// Auth settings
|
||||
// Active auth methodes
|
||||
|
|
@ -82,11 +81,8 @@ class ServerSettings {
|
|||
this.scannerFindCovers = !!settings.scannerFindCovers
|
||||
this.scannerCoverProvider = settings.scannerCoverProvider || 'google'
|
||||
this.scannerParseSubtitle = settings.scannerParseSubtitle
|
||||
this.scannerPreferAudioMetadata = !!settings.scannerPreferAudioMetadata
|
||||
this.scannerPreferOpfMetadata = !!settings.scannerPreferOpfMetadata
|
||||
this.scannerPreferMatchedMetadata = !!settings.scannerPreferMatchedMetadata
|
||||
this.scannerDisableWatcher = !!settings.scannerDisableWatcher
|
||||
this.scannerPreferOverdriveMediaMarker = !!settings.scannerPreferOverdriveMediaMarker
|
||||
|
||||
this.storeCoverWithItem = !!settings.storeCoverWithItem
|
||||
this.storeMetadataWithItem = !!settings.storeMetadataWithItem
|
||||
|
|
@ -113,6 +109,7 @@ class ServerSettings {
|
|||
this.language = settings.language || 'en-us'
|
||||
this.logLevel = settings.logLevel || Logger.logLevel
|
||||
this.version = settings.version || null
|
||||
this.buildNumber = settings.buildNumber || 0 // Added v2.4.5
|
||||
|
||||
this.authActiveAuthMethods = settings.authActiveAuthMethods || ['local']
|
||||
this.authGoogleOauth20ClientID = settings.authGoogleOauth20ClientID || ''
|
||||
|
|
@ -173,9 +170,9 @@ class ServerSettings {
|
|||
this.metadataFileFormat = 'abs'
|
||||
}
|
||||
|
||||
// Validation
|
||||
if (!['abs', 'json'].includes(this.metadataFileFormat)) {
|
||||
Logger.error(`[ServerSettings] construct: Invalid metadataFileFormat ${this.metadataFileFormat}`)
|
||||
// As of v2.4.5 only json is supported
|
||||
if (this.metadataFileFormat !== 'json') {
|
||||
Logger.warn(`[ServerSettings] Invalid metadataFileFormat ${this.metadataFileFormat} (as of v2.4.5 only json is supported)`)
|
||||
this.metadataFileFormat = 'json'
|
||||
}
|
||||
|
||||
|
|
@ -191,11 +188,8 @@ class ServerSettings {
|
|||
scannerFindCovers: this.scannerFindCovers,
|
||||
scannerCoverProvider: this.scannerCoverProvider,
|
||||
scannerParseSubtitle: this.scannerParseSubtitle,
|
||||
scannerPreferAudioMetadata: this.scannerPreferAudioMetadata,
|
||||
scannerPreferOpfMetadata: this.scannerPreferOpfMetadata,
|
||||
scannerPreferMatchedMetadata: this.scannerPreferMatchedMetadata,
|
||||
scannerDisableWatcher: this.scannerDisableWatcher,
|
||||
scannerPreferOverdriveMediaMarker: this.scannerPreferOverdriveMediaMarker,
|
||||
storeCoverWithItem: this.storeCoverWithItem,
|
||||
storeMetadataWithItem: this.storeMetadataWithItem,
|
||||
metadataFileFormat: this.metadataFileFormat,
|
||||
|
|
@ -217,6 +211,7 @@ class ServerSettings {
|
|||
language: this.language,
|
||||
logLevel: this.logLevel,
|
||||
version: this.version,
|
||||
buildNumber: this.buildNumber,
|
||||
authActiveAuthMethods: this.authActiveAuthMethods,
|
||||
authGoogleOauth20ClientID: this.authGoogleOauth20ClientID, // Do not return to client
|
||||
authGoogleOauth20ClientSecret: this.authGoogleOauth20ClientSecret, // Do not return to client
|
||||
|
|
|
|||
|
|
@ -7,6 +7,7 @@ class User {
|
|||
this.id = null
|
||||
this.oldUserId = null // TODO: Temp for keeping old access tokens
|
||||
this.username = null
|
||||
this.email = null
|
||||
this.pash = null
|
||||
this.type = null
|
||||
this.token = null
|
||||
|
|
@ -34,6 +35,9 @@ class User {
|
|||
get isAdmin() {
|
||||
return this.type === 'admin'
|
||||
}
|
||||
get isUser() {
|
||||
return this.type === 'user'
|
||||
}
|
||||
get isGuest() {
|
||||
return this.type === 'guest'
|
||||
}
|
||||
|
|
@ -76,6 +80,7 @@ class User {
|
|||
id: this.id,
|
||||
oldUserId: this.oldUserId,
|
||||
username: this.username,
|
||||
email: this.email,
|
||||
pash: this.pash,
|
||||
type: this.type,
|
||||
token: this.token,
|
||||
|
|
@ -97,6 +102,7 @@ class User {
|
|||
id: this.id,
|
||||
oldUserId: this.oldUserId,
|
||||
username: this.username,
|
||||
email: this.email,
|
||||
type: this.type,
|
||||
token: (this.type === 'root' && hideRootToken) ? '' : this.token,
|
||||
mediaProgress: this.mediaProgress ? this.mediaProgress.map(li => li.toJSON()) : [],
|
||||
|
|
@ -140,6 +146,7 @@ class User {
|
|||
this.id = user.id
|
||||
this.oldUserId = user.oldUserId
|
||||
this.username = user.username
|
||||
this.email = user.email || null
|
||||
this.pash = user.pash
|
||||
this.type = user.type
|
||||
this.token = user.token
|
||||
|
|
@ -184,7 +191,7 @@ class User {
|
|||
update(payload) {
|
||||
var hasUpdates = false
|
||||
// Update the following keys:
|
||||
const keysToCheck = ['pash', 'type', 'username', 'isActive']
|
||||
const keysToCheck = ['pash', 'type', 'username', 'email', 'isActive']
|
||||
keysToCheck.forEach((key) => {
|
||||
if (payload[key] !== undefined) {
|
||||
if (key === 'isActive' || payload[key]) { // pash, type, username must evaluate to true (cannot be null or empty)
|
||||
|
|
|
|||
|
|
@ -1,4 +1,3 @@
|
|||
const sequelize = require('sequelize')
|
||||
const express = require('express')
|
||||
const Path = require('path')
|
||||
|
||||
|
|
@ -40,6 +39,7 @@ class ApiRouter {
|
|||
this.playbackSessionManager = Server.playbackSessionManager
|
||||
this.abMergeManager = Server.abMergeManager
|
||||
this.backupManager = Server.backupManager
|
||||
/** @type {import('../Watcher')} */
|
||||
this.watcher = Server.watcher
|
||||
this.podcastManager = Server.podcastManager
|
||||
this.audioMetadataManager = Server.audioMetadataManager
|
||||
|
|
@ -47,7 +47,6 @@ class ApiRouter {
|
|||
this.cronManager = Server.cronManager
|
||||
this.notificationManager = Server.notificationManager
|
||||
this.emailManager = Server.emailManager
|
||||
this.taskManager = Server.taskManager
|
||||
|
||||
this.router = express()
|
||||
this.router.disable('x-powered-by')
|
||||
|
|
@ -84,6 +83,7 @@ class ApiRouter {
|
|||
this.router.get('/libraries/:id/recent-episodes', LibraryController.middleware.bind(this), LibraryController.getRecentEpisodes.bind(this))
|
||||
this.router.get('/libraries/:id/opml', LibraryController.middleware.bind(this), LibraryController.getOPMLFile.bind(this))
|
||||
this.router.post('/libraries/order', LibraryController.reorder.bind(this))
|
||||
this.router.post('/libraries/:id/remove-metadata', LibraryController.middleware.bind(this), LibraryController.removeAllMetadataFiles.bind(this))
|
||||
|
||||
//
|
||||
// Item Routes
|
||||
|
|
@ -202,6 +202,8 @@ class ApiRouter {
|
|||
this.router.delete('/authors/:id', AuthorController.middleware.bind(this), AuthorController.delete.bind(this))
|
||||
this.router.post('/authors/:id/match', AuthorController.middleware.bind(this), AuthorController.match.bind(this))
|
||||
this.router.get('/authors/:id/image', AuthorController.middleware.bind(this), AuthorController.getImage.bind(this))
|
||||
this.router.post('/authors/:id/image', AuthorController.middleware.bind(this), AuthorController.uploadImage.bind(this))
|
||||
this.router.delete('/authors/:id/image', AuthorController.middleware.bind(this), AuthorController.deleteImage.bind(this))
|
||||
|
||||
//
|
||||
// Series Routes
|
||||
|
|
@ -253,11 +255,11 @@ class ApiRouter {
|
|||
//
|
||||
// Email Routes (Admin and up)
|
||||
//
|
||||
this.router.get('/emails/settings', EmailController.middleware.bind(this), EmailController.getSettings.bind(this))
|
||||
this.router.patch('/emails/settings', EmailController.middleware.bind(this), EmailController.updateSettings.bind(this))
|
||||
this.router.post('/emails/test', EmailController.middleware.bind(this), EmailController.sendTest.bind(this))
|
||||
this.router.post('/emails/ereader-devices', EmailController.middleware.bind(this), EmailController.updateEReaderDevices.bind(this))
|
||||
this.router.post('/emails/send-ebook-to-device', EmailController.middleware.bind(this), EmailController.sendEBookToDevice.bind(this))
|
||||
this.router.get('/emails/settings', EmailController.adminMiddleware.bind(this), EmailController.getSettings.bind(this))
|
||||
this.router.patch('/emails/settings', EmailController.adminMiddleware.bind(this), EmailController.updateSettings.bind(this))
|
||||
this.router.post('/emails/test', EmailController.adminMiddleware.bind(this), EmailController.sendTest.bind(this))
|
||||
this.router.post('/emails/ereader-devices', EmailController.adminMiddleware.bind(this), EmailController.updateEReaderDevices.bind(this))
|
||||
this.router.post('/emails/send-ebook-to-device', EmailController.sendEBookToDevice.bind(this))
|
||||
|
||||
//
|
||||
// Search Routes
|
||||
|
|
@ -308,6 +310,7 @@ class ApiRouter {
|
|||
this.router.delete('/genres/:genre', MiscController.deleteGenre.bind(this))
|
||||
this.router.post('/validate-cron', MiscController.validateCronExpression.bind(this))
|
||||
this.router.get('/auth-settings', MiscController.getAuthSettings.bind(this))
|
||||
this.router.post('/watcher/update', MiscController.updateWatchedPath.bind(this))
|
||||
}
|
||||
|
||||
async getDirectories(dir, relpath, excludedDirs, level = 0) {
|
||||
|
|
|
|||
|
|
@ -27,28 +27,60 @@ class HlsRouter {
|
|||
return Number(num_part)
|
||||
}
|
||||
|
||||
async streamFileRequest(req, res) {
|
||||
var streamId = req.params.stream
|
||||
var fullFilePath = Path.join(this.playbackSessionManager.StreamsPath, streamId, req.params.file)
|
||||
/**
|
||||
* Ensure filepath is inside streamDir
|
||||
* Used to prevent arbitrary file reads
|
||||
* @see https://nodejs.org/api/path.html#pathrelativefrom-to
|
||||
*
|
||||
* @param {string} streamDir
|
||||
* @param {string} filepath
|
||||
* @returns {boolean}
|
||||
*/
|
||||
validateStreamFilePath(streamDir, filepath) {
|
||||
const relative = Path.relative(streamDir, filepath)
|
||||
return relative && !relative.startsWith('..') && !Path.isAbsolute(relative)
|
||||
}
|
||||
|
||||
var exists = await fs.pathExists(fullFilePath)
|
||||
if (!exists) {
|
||||
/**
|
||||
* GET /hls/:stream/:file
|
||||
* File must have extname .ts or .m3u8
|
||||
*
|
||||
* @param {express.Request} req
|
||||
* @param {express.Response} res
|
||||
*/
|
||||
async streamFileRequest(req, res) {
|
||||
const streamId = req.params.stream
|
||||
// Ensure stream is open
|
||||
const stream = this.playbackSessionManager.getStream(streamId)
|
||||
if (!stream) {
|
||||
Logger.error(`[HlsRouter] Stream "${streamId}" does not exist`)
|
||||
return res.sendStatus(404)
|
||||
}
|
||||
|
||||
// Ensure stream filepath is valid
|
||||
const streamDir = Path.join(this.playbackSessionManager.StreamsPath, streamId)
|
||||
const fullFilePath = Path.join(streamDir, req.params.file)
|
||||
if (!this.validateStreamFilePath(streamDir, fullFilePath)) {
|
||||
Logger.error(`[HlsRouter] Invalid file parameter "${req.params.file}"`)
|
||||
return res.sendStatus(400)
|
||||
}
|
||||
|
||||
const fileExt = Path.extname(req.params.file)
|
||||
if (fileExt !== '.ts' && fileExt !== '.m3u8') {
|
||||
Logger.error(`[HlsRouter] Invalid file parameter "${req.params.file}" extname. Must be .ts or .m3u8`)
|
||||
return res.sendStatus(400)
|
||||
}
|
||||
|
||||
if (!(await fs.pathExists(fullFilePath))) {
|
||||
Logger.warn('File path does not exist', fullFilePath)
|
||||
|
||||
var fileExt = Path.extname(req.params.file)
|
||||
if (fileExt === '.ts' || fileExt === '.m4s') {
|
||||
var segNum = this.parseSegmentFilename(req.params.file)
|
||||
var stream = this.playbackSessionManager.getStream(streamId)
|
||||
if (!stream) {
|
||||
Logger.error(`[HlsRouter] Stream ${streamId} does not exist`)
|
||||
return res.sendStatus(500)
|
||||
}
|
||||
if (fileExt === '.ts') {
|
||||
const segNum = this.parseSegmentFilename(req.params.file)
|
||||
|
||||
if (stream.isResetting) {
|
||||
Logger.info(`[HlsRouter] Stream ${streamId} is currently resetting`)
|
||||
return res.sendStatus(404)
|
||||
} else {
|
||||
var startTimeForReset = await stream.checkSegmentNumberRequest(segNum)
|
||||
const startTimeForReset = await stream.checkSegmentNumberRequest(segNum)
|
||||
if (startTimeForReset) {
|
||||
// HLS.js will restart the stream at the new time
|
||||
Logger.info(`[HlsRouter] Resetting Stream - notify client @${startTimeForReset}s`)
|
||||
|
|
@ -56,13 +88,12 @@ class HlsRouter {
|
|||
startTime: startTimeForReset,
|
||||
streamId: stream.id
|
||||
})
|
||||
return res.sendStatus(500)
|
||||
}
|
||||
}
|
||||
}
|
||||
return res.sendStatus(404)
|
||||
}
|
||||
|
||||
// Logger.info('Sending file', fullFilePath)
|
||||
res.sendFile(fullFilePath)
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,8 +0,0 @@
|
|||
const express = require('express')
|
||||
const libraries = require('./libraries')
|
||||
|
||||
const router = express.Router()
|
||||
|
||||
router.use('/libraries', libraries)
|
||||
|
||||
module.exports = router
|
||||
|
|
@ -1,7 +0,0 @@
|
|||
const express = require('express')
|
||||
|
||||
const router = express.Router()
|
||||
|
||||
// TODO: Add library routes
|
||||
|
||||
module.exports = router
|
||||
82
server/scanner/AbsMetadataFileScanner.js
Normal file
82
server/scanner/AbsMetadataFileScanner.js
Normal file
|
|
@ -0,0 +1,82 @@
|
|||
const Path = require('path')
|
||||
const fsExtra = require('../libs/fsExtra')
|
||||
const { readTextFile } = require('../utils/fileUtils')
|
||||
const { LogLevel } = require('../utils/constants')
|
||||
const abmetadataGenerator = require('../utils/generators/abmetadataGenerator')
|
||||
|
||||
class AbsMetadataFileScanner {
|
||||
constructor() { }
|
||||
|
||||
/**
|
||||
* Check for metadata.json file and set book metadata
|
||||
*
|
||||
* @param {import('./LibraryScan')} libraryScan
|
||||
* @param {import('./LibraryItemScanData')} libraryItemData
|
||||
* @param {Object} bookMetadata
|
||||
* @param {string} [existingLibraryItemId]
|
||||
*/
|
||||
async scanBookMetadataFile(libraryScan, libraryItemData, bookMetadata, existingLibraryItemId = null) {
|
||||
const metadataLibraryFile = libraryItemData.metadataJsonLibraryFile
|
||||
let metadataText = metadataLibraryFile ? await readTextFile(metadataLibraryFile.metadata.path) : null
|
||||
let metadataFilePath = metadataLibraryFile?.metadata.path
|
||||
|
||||
// When metadata file is not stored with library item then check in the /metadata/items folder for it
|
||||
if (!metadataText && existingLibraryItemId) {
|
||||
let metadataPath = Path.join(global.MetadataPath, 'items', existingLibraryItemId)
|
||||
|
||||
metadataFilePath = Path.join(metadataPath, 'metadata.json')
|
||||
if (await fsExtra.pathExists(metadataFilePath)) {
|
||||
metadataText = await readTextFile(metadataFilePath)
|
||||
}
|
||||
}
|
||||
|
||||
if (metadataText) {
|
||||
libraryScan.addLog(LogLevel.INFO, `Found metadata file "${metadataFilePath}"`)
|
||||
const abMetadata = abmetadataGenerator.parseJson(metadataText) || {}
|
||||
for (const key in abMetadata) {
|
||||
// TODO: When to override with null or empty arrays?
|
||||
if (abMetadata[key] === undefined || abMetadata[key] === null) continue
|
||||
if (key === 'tags' && !abMetadata.tags?.length) continue
|
||||
if (key === 'chapters' && !abMetadata.chapters?.length) continue
|
||||
|
||||
bookMetadata[key] = abMetadata[key]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check for metadata.json file and set podcast metadata
|
||||
*
|
||||
* @param {import('./LibraryScan')} libraryScan
|
||||
* @param {import('./LibraryItemScanData')} libraryItemData
|
||||
* @param {Object} podcastMetadata
|
||||
* @param {string} [existingLibraryItemId]
|
||||
*/
|
||||
async scanPodcastMetadataFile(libraryScan, libraryItemData, podcastMetadata, existingLibraryItemId = null) {
|
||||
const metadataLibraryFile = libraryItemData.metadataJsonLibraryFile
|
||||
let metadataText = metadataLibraryFile ? await readTextFile(metadataLibraryFile.metadata.path) : null
|
||||
let metadataFilePath = metadataLibraryFile?.metadata.path
|
||||
|
||||
// When metadata file is not stored with library item then check in the /metadata/items folder for it
|
||||
if (!metadataText && existingLibraryItemId) {
|
||||
let metadataPath = Path.join(global.MetadataPath, 'items', existingLibraryItemId)
|
||||
|
||||
metadataFilePath = Path.join(metadataPath, 'metadata.json')
|
||||
if (await fsExtra.pathExists(metadataFilePath)) {
|
||||
metadataText = await readTextFile(metadataFilePath)
|
||||
}
|
||||
}
|
||||
|
||||
if (metadataText) {
|
||||
libraryScan.addLog(LogLevel.INFO, `Found metadata file "${metadataFilePath}"`)
|
||||
const abMetadata = abmetadataGenerator.parseJson(metadataText) || {}
|
||||
for (const key in abMetadata) {
|
||||
if (abMetadata[key] === undefined || abMetadata[key] === null) continue
|
||||
if (key === 'tags' && !abMetadata.tags?.length) continue
|
||||
|
||||
podcastMetadata[key] = abMetadata[key]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
module.exports = new AbsMetadataFileScanner()
|
||||
|
|
@ -1,6 +1,9 @@
|
|||
const Path = require('path')
|
||||
const Logger = require('../Logger')
|
||||
const prober = require('../utils/prober')
|
||||
const { LogLevel } = require('../utils/constants')
|
||||
const { parseOverdriveMediaMarkersAsChapters } = require('../utils/parsers/parseOverdriveMediaMarkers')
|
||||
const parseNameString = require('../utils/parsers/parseNameString')
|
||||
const LibraryItem = require('../models/LibraryItem')
|
||||
const AudioFile = require('../objects/files/AudioFile')
|
||||
|
||||
|
|
@ -205,5 +208,339 @@ class AudioFileScanner {
|
|||
Logger.debug(`[AudioFileScanner] Running ffprobe for audio file at "${audioFile.metadata.path}"`)
|
||||
return prober.rawProbe(audioFile.metadata.path)
|
||||
}
|
||||
|
||||
/**
|
||||
* Set book metadata & chapters from audio file meta tags
|
||||
*
|
||||
* @param {string} bookTitle
|
||||
* @param {import('../models/Book').AudioFileObject} audioFile
|
||||
* @param {Object} bookMetadata
|
||||
* @param {import('./LibraryScan')} libraryScan
|
||||
*/
|
||||
setBookMetadataFromAudioMetaTags(bookTitle, audioFiles, bookMetadata, libraryScan) {
|
||||
const MetadataMapArray = [
|
||||
{
|
||||
tag: 'tagComposer',
|
||||
key: 'narrators'
|
||||
},
|
||||
{
|
||||
tag: 'tagDescription',
|
||||
altTag: 'tagComment',
|
||||
key: 'description'
|
||||
},
|
||||
{
|
||||
tag: 'tagPublisher',
|
||||
key: 'publisher'
|
||||
},
|
||||
{
|
||||
tag: 'tagDate',
|
||||
key: 'publishedYear'
|
||||
},
|
||||
{
|
||||
tag: 'tagSubtitle',
|
||||
key: 'subtitle'
|
||||
},
|
||||
{
|
||||
tag: 'tagAlbum',
|
||||
altTag: 'tagTitle',
|
||||
key: 'title',
|
||||
},
|
||||
{
|
||||
tag: 'tagArtist',
|
||||
altTag: 'tagAlbumArtist',
|
||||
key: 'authors'
|
||||
},
|
||||
{
|
||||
tag: 'tagGenre',
|
||||
key: 'genres'
|
||||
},
|
||||
{
|
||||
tag: 'tagSeries',
|
||||
key: 'series'
|
||||
},
|
||||
{
|
||||
tag: 'tagIsbn',
|
||||
key: 'isbn'
|
||||
},
|
||||
{
|
||||
tag: 'tagLanguage',
|
||||
key: 'language'
|
||||
},
|
||||
{
|
||||
tag: 'tagASIN',
|
||||
key: 'asin'
|
||||
}
|
||||
]
|
||||
|
||||
const firstScannedFile = audioFiles[0]
|
||||
const audioFileMetaTags = firstScannedFile.metaTags
|
||||
MetadataMapArray.forEach((mapping) => {
|
||||
let value = audioFileMetaTags[mapping.tag]
|
||||
if (!value && mapping.altTag) {
|
||||
value = audioFileMetaTags[mapping.altTag]
|
||||
}
|
||||
|
||||
if (value && typeof value === 'string') {
|
||||
value = value.trim() // Trim whitespace
|
||||
|
||||
if (mapping.key === 'narrators') {
|
||||
bookMetadata.narrators = parseNameString.parse(value)?.names || []
|
||||
} else if (mapping.key === 'authors') {
|
||||
bookMetadata.authors = parseNameString.parse(value)?.names || []
|
||||
} else if (mapping.key === 'genres') {
|
||||
bookMetadata.genres = this.parseGenresString(value)
|
||||
} else if (mapping.key === 'series') {
|
||||
bookMetadata.series = [
|
||||
{
|
||||
name: value,
|
||||
sequence: audioFileMetaTags.tagSeriesPart || null
|
||||
}
|
||||
]
|
||||
} else {
|
||||
bookMetadata[mapping.key] = value
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
// Set chapters
|
||||
const chapters = this.getBookChaptersFromAudioFiles(bookTitle, audioFiles, libraryScan)
|
||||
if (chapters.length) {
|
||||
bookMetadata.chapters = chapters
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Set podcast metadata from first audio file
|
||||
*
|
||||
* @param {import('../models/Book').AudioFileObject} audioFile
|
||||
* @param {Object} podcastMetadata
|
||||
* @param {import('./LibraryScan')} libraryScan
|
||||
*/
|
||||
setPodcastMetadataFromAudioMetaTags(audioFile, podcastMetadata, libraryScan) {
|
||||
const audioFileMetaTags = audioFile.metaTags
|
||||
|
||||
const MetadataMapArray = [
|
||||
{
|
||||
tag: 'tagAlbum',
|
||||
altTag: 'tagSeries',
|
||||
key: 'title'
|
||||
},
|
||||
{
|
||||
tag: 'tagArtist',
|
||||
key: 'author'
|
||||
},
|
||||
{
|
||||
tag: 'tagGenre',
|
||||
key: 'genres'
|
||||
},
|
||||
{
|
||||
tag: 'tagLanguage',
|
||||
key: 'language'
|
||||
},
|
||||
{
|
||||
tag: 'tagItunesId',
|
||||
key: 'itunesId'
|
||||
},
|
||||
{
|
||||
tag: 'tagPodcastType',
|
||||
key: 'podcastType',
|
||||
}
|
||||
]
|
||||
|
||||
MetadataMapArray.forEach((mapping) => {
|
||||
let value = audioFileMetaTags[mapping.tag]
|
||||
let tagToUse = mapping.tag
|
||||
if (!value && mapping.altTag) {
|
||||
value = audioFileMetaTags[mapping.altTag]
|
||||
tagToUse = mapping.altTag
|
||||
}
|
||||
|
||||
if (value && typeof value === 'string') {
|
||||
value = value.trim() // Trim whitespace
|
||||
|
||||
if (mapping.key === 'genres') {
|
||||
podcastMetadata.genres = this.parseGenresString(value)
|
||||
libraryScan.addLog(LogLevel.DEBUG, `Mapping metadata to key ${tagToUse} => ${mapping.key}: ${podcastMetadata.genres.join(', ')}`)
|
||||
} else {
|
||||
podcastMetadata[mapping.key] = value
|
||||
libraryScan.addLog(LogLevel.DEBUG, `Mapping metadata to key ${tagToUse} => ${mapping.key}: ${podcastMetadata[mapping.key]}`)
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @param {import('../models/PodcastEpisode')} podcastEpisode Not the model when creating new podcast
|
||||
* @param {import('./ScanLogger')} scanLogger
|
||||
*/
|
||||
setPodcastEpisodeMetadataFromAudioMetaTags(podcastEpisode, scanLogger) {
|
||||
const MetadataMapArray = [
|
||||
{
|
||||
tag: 'tagComment',
|
||||
altTag: 'tagSubtitle',
|
||||
key: 'description'
|
||||
},
|
||||
{
|
||||
tag: 'tagSubtitle',
|
||||
key: 'subtitle'
|
||||
},
|
||||
{
|
||||
tag: 'tagDate',
|
||||
key: 'pubDate'
|
||||
},
|
||||
{
|
||||
tag: 'tagDisc',
|
||||
key: 'season',
|
||||
},
|
||||
{
|
||||
tag: 'tagTrack',
|
||||
altTag: 'tagSeriesPart',
|
||||
key: 'episode'
|
||||
},
|
||||
{
|
||||
tag: 'tagTitle',
|
||||
key: 'title'
|
||||
},
|
||||
{
|
||||
tag: 'tagEpisodeType',
|
||||
key: 'episodeType'
|
||||
}
|
||||
]
|
||||
|
||||
const audioFileMetaTags = podcastEpisode.audioFile.metaTags
|
||||
MetadataMapArray.forEach((mapping) => {
|
||||
let value = audioFileMetaTags[mapping.tag]
|
||||
let tagToUse = mapping.tag
|
||||
if (!value && mapping.altTag) {
|
||||
tagToUse = mapping.altTag
|
||||
value = audioFileMetaTags[mapping.altTag]
|
||||
}
|
||||
|
||||
if (value && typeof value === 'string') {
|
||||
value = value.trim() // Trim whitespace
|
||||
|
||||
if (mapping.key === 'pubDate') {
|
||||
const pubJsDate = new Date(value)
|
||||
if (pubJsDate && !isNaN(pubJsDate)) {
|
||||
podcastEpisode.publishedAt = pubJsDate.valueOf()
|
||||
podcastEpisode.pubDate = value
|
||||
scanLogger.addLog(LogLevel.DEBUG, `Mapping metadata to key ${tagToUse} => ${mapping.key}: ${podcastEpisode[mapping.key]}`)
|
||||
} else {
|
||||
scanLogger.addLog(LogLevel.WARN, `Mapping pubDate with tag ${tagToUse} has invalid date "${value}"`)
|
||||
}
|
||||
} else if (mapping.key === 'episodeType') {
|
||||
if (['full', 'trailer', 'bonus'].includes(value)) {
|
||||
podcastEpisode.episodeType = value
|
||||
scanLogger.addLog(LogLevel.DEBUG, `Mapping metadata to key ${tagToUse} => ${mapping.key}: ${podcastEpisode[mapping.key]}`)
|
||||
} else {
|
||||
scanLogger.addLog(LogLevel.WARN, `Mapping episodeType with invalid value "${value}". Must be one of [full, trailer, bonus].`)
|
||||
}
|
||||
} else {
|
||||
podcastEpisode[mapping.key] = value
|
||||
scanLogger.addLog(LogLevel.DEBUG, `Mapping metadata to key ${tagToUse} => ${mapping.key}: ${podcastEpisode[mapping.key]}`)
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} bookTitle
|
||||
* @param {AudioFile[]} audioFiles
|
||||
* @param {import('./LibraryScan')} libraryScan
|
||||
* @returns {import('../models/Book').ChapterObject[]}
|
||||
*/
|
||||
getBookChaptersFromAudioFiles(bookTitle, audioFiles, libraryScan) {
|
||||
// If overdrive media markers are present then use those instead
|
||||
const overdriveChapters = parseOverdriveMediaMarkersAsChapters(audioFiles)
|
||||
if (overdriveChapters?.length) {
|
||||
libraryScan.addLog(LogLevel.DEBUG, 'Overdrive Media Markers and preference found! Using these for chapter definitions')
|
||||
|
||||
return overdriveChapters
|
||||
}
|
||||
|
||||
let chapters = []
|
||||
|
||||
// If first audio file has embedded chapters then use embedded chapters
|
||||
if (audioFiles[0].chapters?.length) {
|
||||
// If all files chapters are the same, then only make chapters for the first file
|
||||
if (
|
||||
audioFiles.length === 1 ||
|
||||
audioFiles.length > 1 &&
|
||||
audioFiles[0].chapters.length === audioFiles[1].chapters?.length &&
|
||||
audioFiles[0].chapters.every((c, i) => c.title === audioFiles[1].chapters[i].title)
|
||||
) {
|
||||
libraryScan.addLog(LogLevel.DEBUG, `setChapters: Using embedded chapters in first audio file ${audioFiles[0].metadata?.path}`)
|
||||
chapters = audioFiles[0].chapters.map((c) => ({ ...c }))
|
||||
} else {
|
||||
libraryScan.addLog(LogLevel.DEBUG, `setChapters: Using embedded chapters from all audio files ${audioFiles[0].metadata?.path}`)
|
||||
let currChapterId = 0
|
||||
let currStartTime = 0
|
||||
|
||||
audioFiles.forEach((file) => {
|
||||
if (file.duration) {
|
||||
const afChapters = file.chapters?.map((c) => ({
|
||||
...c,
|
||||
id: c.id + currChapterId,
|
||||
start: c.start + currStartTime,
|
||||
end: c.end + currStartTime,
|
||||
})) ?? []
|
||||
chapters = chapters.concat(afChapters)
|
||||
|
||||
currChapterId += file.chapters?.length ?? 0
|
||||
currStartTime += file.duration
|
||||
}
|
||||
})
|
||||
return chapters
|
||||
}
|
||||
} else if (audioFiles.length > 1) {
|
||||
|
||||
// In some cases the ID3 title tag for each file is the chapter title, the criteria to determine if this will be used
|
||||
// 1. Every audio file has an ID3 title tag set
|
||||
// 2. None of the title tags are the same as the book title
|
||||
// 3. Every ID3 title tag is unique
|
||||
const metaTagTitlesFound = [...new Set(audioFiles.map(af => af.metaTags?.tagTitle).filter(tagTitle => !!tagTitle && tagTitle !== bookTitle))]
|
||||
const useMetaTagAsTitle = metaTagTitlesFound.length === audioFiles.length
|
||||
|
||||
// Build chapters from audio files
|
||||
let currChapterId = 0
|
||||
let currStartTime = 0
|
||||
audioFiles.forEach((file) => {
|
||||
if (file.duration) {
|
||||
let title = file.metadata.filename ? Path.basename(file.metadata.filename, Path.extname(file.metadata.filename)) : `Chapter ${currChapterId}`
|
||||
if (useMetaTagAsTitle) {
|
||||
title = file.metaTags.tagTitle
|
||||
}
|
||||
|
||||
chapters.push({
|
||||
id: currChapterId++,
|
||||
start: currStartTime,
|
||||
end: currStartTime + file.duration,
|
||||
title
|
||||
})
|
||||
currStartTime += file.duration
|
||||
}
|
||||
})
|
||||
}
|
||||
return chapters
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse a genre string into multiple genres
|
||||
* @example "Fantasy;Sci-Fi;History" => ["Fantasy", "Sci-Fi", "History"]
|
||||
*
|
||||
* @param {string} genreTag
|
||||
* @returns {string[]}
|
||||
*/
|
||||
parseGenresString(genreTag) {
|
||||
if (!genreTag?.length) return []
|
||||
const separators = ['/', '//', ';']
|
||||
for (let i = 0; i < separators.length; i++) {
|
||||
if (genreTag.includes(separators[i])) {
|
||||
return genreTag.split(separators[i]).map(genre => genre.trim()).filter(g => !!g)
|
||||
}
|
||||
}
|
||||
return [genreTag]
|
||||
}
|
||||
}
|
||||
module.exports = new AudioFileScanner()
|
||||
|
|
@ -3,8 +3,6 @@ const Path = require('path')
|
|||
const sequelize = require('sequelize')
|
||||
const { LogLevel } = require('../utils/constants')
|
||||
const { getTitleIgnorePrefix, areEquivalent } = require('../utils/index')
|
||||
const { parseOpfMetadataXML } = require('../utils/parsers/parseOpfMetadata')
|
||||
const { parseOverdriveMediaMarkersAsChapters } = require('../utils/parsers/parseOverdriveMediaMarkers')
|
||||
const abmetadataGenerator = require('../utils/generators/abmetadataGenerator')
|
||||
const parseNameString = require('../utils/parsers/parseNameString')
|
||||
const globals = require('../utils/globals')
|
||||
|
|
@ -16,9 +14,12 @@ const CoverManager = require('../managers/CoverManager')
|
|||
const LibraryFile = require('../objects/files/LibraryFile')
|
||||
const SocketAuthority = require('../SocketAuthority')
|
||||
const fsExtra = require("../libs/fsExtra")
|
||||
const LibraryScan = require("./LibraryScan")
|
||||
const BookFinder = require('../finders/BookFinder')
|
||||
|
||||
const LibraryScan = require("./LibraryScan")
|
||||
const OpfFileScanner = require('./OpfFileScanner')
|
||||
const AbsMetadataFileScanner = require('./AbsMetadataFileScanner')
|
||||
|
||||
/**
|
||||
* Metadata for books pulled from files
|
||||
* @typedef BookMetadataObject
|
||||
|
|
@ -50,7 +51,7 @@ class BookScanner {
|
|||
* @param {import('./LibraryItemScanData')} libraryItemData
|
||||
* @param {import('../models/Library').LibrarySettingsObject} librarySettings
|
||||
* @param {LibraryScan} libraryScan
|
||||
* @returns {Promise<import('../models/LibraryItem')>}
|
||||
* @returns {Promise<{libraryItem:import('../models/LibraryItem'), wasUpdated:boolean}>}
|
||||
*/
|
||||
async rescanExistingBookLibraryItem(existingLibraryItem, libraryItemData, librarySettings, libraryScan) {
|
||||
/** @type {import('../models/Book')} */
|
||||
|
|
@ -75,8 +76,8 @@ class BookScanner {
|
|||
]
|
||||
})
|
||||
|
||||
let hasMediaChanges = libraryItemData.hasAudioFileChanges
|
||||
if (libraryItemData.hasAudioFileChanges || libraryItemData.audioLibraryFiles.length !== media.audioFiles.length) {
|
||||
let hasMediaChanges = libraryItemData.hasAudioFileChanges || libraryItemData.audioLibraryFiles.length !== media.audioFiles.length
|
||||
if (hasMediaChanges) {
|
||||
// Filter out audio files that were removed
|
||||
media.audioFiles = media.audioFiles.filter(af => !libraryItemData.checkAudioFileRemoved(af))
|
||||
|
||||
|
|
@ -168,7 +169,7 @@ class BookScanner {
|
|||
hasMediaChanges = true
|
||||
}
|
||||
|
||||
const bookMetadata = await this.getBookMetadataFromScanData(media.audioFiles, libraryItemData, libraryScan, existingLibraryItem.id)
|
||||
const bookMetadata = await this.getBookMetadataFromScanData(media.audioFiles, libraryItemData, libraryScan, librarySettings, existingLibraryItem.id)
|
||||
let authorsUpdated = false
|
||||
const bookAuthorsRemoved = []
|
||||
let seriesUpdated = false
|
||||
|
|
@ -338,6 +339,19 @@ class BookScanner {
|
|||
libraryItemUpdated = global.ServerSettings.storeMetadataWithItem && !existingLibraryItem.isFile
|
||||
}
|
||||
|
||||
// If book has no audio files and no ebook then it is considered missing
|
||||
if (!media.audioFiles.length && !media.ebookFile) {
|
||||
if (!existingLibraryItem.isMissing) {
|
||||
libraryScan.addLog(LogLevel.INFO, `Book "${bookMetadata.title}" has no audio files and no ebook file. Setting library item as missing`)
|
||||
existingLibraryItem.isMissing = true
|
||||
libraryItemUpdated = true
|
||||
}
|
||||
} else if (existingLibraryItem.isMissing) {
|
||||
libraryScan.addLog(LogLevel.INFO, `Book "${bookMetadata.title}" was missing but now has media files. Setting library item as NOT missing`)
|
||||
existingLibraryItem.isMissing = false
|
||||
libraryItemUpdated = true
|
||||
}
|
||||
|
||||
// Check/update the isSupplementary flag on libraryFiles for the LibraryItem
|
||||
for (const libraryFile of existingLibraryItem.libraryFiles) {
|
||||
if (globals.SupportedEbookTypes.includes(libraryFile.metadata.ext.slice(1).toLowerCase())) {
|
||||
|
|
@ -360,7 +374,10 @@ class BookScanner {
|
|||
libraryScan.seriesRemovedFromBooks.push(...bookSeriesRemoved)
|
||||
libraryScan.authorsRemovedFromBooks.push(...bookAuthorsRemoved)
|
||||
|
||||
return existingLibraryItem
|
||||
return {
|
||||
libraryItem: existingLibraryItem,
|
||||
wasUpdated: hasMediaChanges || libraryItemUpdated || seriesUpdated || authorsUpdated
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
@ -389,7 +406,7 @@ class BookScanner {
|
|||
ebookLibraryFile.ebookFormat = ebookLibraryFile.metadata.ext.slice(1).toLowerCase()
|
||||
}
|
||||
|
||||
const bookMetadata = await this.getBookMetadataFromScanData(scannedAudioFiles, libraryItemData, libraryScan)
|
||||
const bookMetadata = await this.getBookMetadataFromScanData(scannedAudioFiles, libraryItemData, libraryScan, librarySettings)
|
||||
bookMetadata.explicit = !!bookMetadata.explicit // Ensure boolean
|
||||
bookMetadata.abridged = !!bookMetadata.abridged // Ensure boolean
|
||||
|
||||
|
|
@ -548,226 +565,42 @@ class BookScanner {
|
|||
* @param {import('../models/Book').AudioFileObject[]} audioFiles
|
||||
* @param {import('./LibraryItemScanData')} libraryItemData
|
||||
* @param {LibraryScan} libraryScan
|
||||
* @param {import('../models/Library').LibrarySettingsObject} librarySettings
|
||||
* @param {string} [existingLibraryItemId]
|
||||
* @returns {Promise<BookMetadataObject>}
|
||||
*/
|
||||
async getBookMetadataFromScanData(audioFiles, libraryItemData, libraryScan, existingLibraryItemId = null) {
|
||||
async getBookMetadataFromScanData(audioFiles, libraryItemData, libraryScan, librarySettings, existingLibraryItemId = null) {
|
||||
// First set book metadata from folder/file names
|
||||
const bookMetadata = {
|
||||
title: libraryItemData.mediaMetadata.title,
|
||||
titleIgnorePrefix: getTitleIgnorePrefix(libraryItemData.mediaMetadata.title),
|
||||
subtitle: libraryItemData.mediaMetadata.subtitle || undefined,
|
||||
publishedYear: libraryItemData.mediaMetadata.publishedYear || undefined,
|
||||
title: libraryItemData.mediaMetadata.title, // required
|
||||
titleIgnorePrefix: undefined,
|
||||
subtitle: undefined,
|
||||
publishedYear: undefined,
|
||||
publisher: undefined,
|
||||
description: undefined,
|
||||
isbn: undefined,
|
||||
asin: undefined,
|
||||
language: undefined,
|
||||
narrators: parseNameString.parse(libraryItemData.mediaMetadata.narrators)?.names || [],
|
||||
narrators: [],
|
||||
genres: [],
|
||||
tags: [],
|
||||
authors: parseNameString.parse(libraryItemData.mediaMetadata.author)?.names || [],
|
||||
authors: [],
|
||||
series: [],
|
||||
chapters: [],
|
||||
explicit: undefined,
|
||||
abridged: undefined,
|
||||
coverPath: undefined
|
||||
}
|
||||
if (libraryItemData.mediaMetadata.series) {
|
||||
bookMetadata.series.push({
|
||||
name: libraryItemData.mediaMetadata.series,
|
||||
sequence: libraryItemData.mediaMetadata.sequence || null
|
||||
})
|
||||
}
|
||||
|
||||
// Fill in or override book metadata from audio file meta tags
|
||||
if (audioFiles.length) {
|
||||
const MetadataMapArray = [
|
||||
{
|
||||
tag: 'tagComposer',
|
||||
key: 'narrators'
|
||||
},
|
||||
{
|
||||
tag: 'tagDescription',
|
||||
altTag: 'tagComment',
|
||||
key: 'description'
|
||||
},
|
||||
{
|
||||
tag: 'tagPublisher',
|
||||
key: 'publisher'
|
||||
},
|
||||
{
|
||||
tag: 'tagDate',
|
||||
key: 'publishedYear'
|
||||
},
|
||||
{
|
||||
tag: 'tagSubtitle',
|
||||
key: 'subtitle'
|
||||
},
|
||||
{
|
||||
tag: 'tagAlbum',
|
||||
altTag: 'tagTitle',
|
||||
key: 'title',
|
||||
},
|
||||
{
|
||||
tag: 'tagArtist',
|
||||
altTag: 'tagAlbumArtist',
|
||||
key: 'authors'
|
||||
},
|
||||
{
|
||||
tag: 'tagGenre',
|
||||
key: 'genres'
|
||||
},
|
||||
{
|
||||
tag: 'tagSeries',
|
||||
key: 'series'
|
||||
},
|
||||
{
|
||||
tag: 'tagIsbn',
|
||||
key: 'isbn'
|
||||
},
|
||||
{
|
||||
tag: 'tagLanguage',
|
||||
key: 'language'
|
||||
},
|
||||
{
|
||||
tag: 'tagASIN',
|
||||
key: 'asin'
|
||||
}
|
||||
]
|
||||
const overrideExistingDetails = Database.serverSettings.scannerPreferAudioMetadata
|
||||
const firstScannedFile = audioFiles[0]
|
||||
const audioFileMetaTags = firstScannedFile.metaTags
|
||||
MetadataMapArray.forEach((mapping) => {
|
||||
let value = audioFileMetaTags[mapping.tag]
|
||||
if (!value && mapping.altTag) {
|
||||
value = audioFileMetaTags[mapping.altTag]
|
||||
}
|
||||
|
||||
if (value && typeof value === 'string') {
|
||||
value = value.trim() // Trim whitespace
|
||||
|
||||
if (mapping.key === 'narrators' && (!bookMetadata.narrators.length || overrideExistingDetails)) {
|
||||
bookMetadata.narrators = parseNameString.parse(value)?.names || []
|
||||
} else if (mapping.key === 'authors' && (!bookMetadata.authors.length || overrideExistingDetails)) {
|
||||
bookMetadata.authors = parseNameString.parse(value)?.names || []
|
||||
} else if (mapping.key === 'genres' && (!bookMetadata.genres.length || overrideExistingDetails)) {
|
||||
bookMetadata.genres = this.parseGenresString(value)
|
||||
} else if (mapping.key === 'series' && (!bookMetadata.series.length || overrideExistingDetails)) {
|
||||
bookMetadata.series = [
|
||||
{
|
||||
name: value,
|
||||
sequence: audioFileMetaTags.tagSeriesPart || null
|
||||
}
|
||||
]
|
||||
} else if (!bookMetadata[mapping.key] || overrideExistingDetails) {
|
||||
bookMetadata[mapping.key] = value
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
// If desc.txt in library item folder then use this for description
|
||||
if (libraryItemData.descTxtLibraryFile) {
|
||||
const description = await readTextFile(libraryItemData.descTxtLibraryFile.metadata.path)
|
||||
if (description.trim()) bookMetadata.description = description.trim()
|
||||
}
|
||||
|
||||
// If reader.txt in library item folder then use this for narrator
|
||||
if (libraryItemData.readerTxtLibraryFile) {
|
||||
let narrator = await readTextFile(libraryItemData.readerTxtLibraryFile.metadata.path)
|
||||
narrator = narrator.split(/\r?\n/)[0]?.trim() || '' // Only use first line
|
||||
if (narrator) {
|
||||
bookMetadata.narrators = parseNameString.parse(narrator)?.names || []
|
||||
}
|
||||
}
|
||||
|
||||
// If opf file is found look for metadata
|
||||
if (libraryItemData.metadataOpfLibraryFile) {
|
||||
const xmlText = await readTextFile(libraryItemData.metadataOpfLibraryFile.metadata.path)
|
||||
const opfMetadata = xmlText ? await parseOpfMetadataXML(xmlText) : null
|
||||
if (opfMetadata) {
|
||||
const opfMetadataOverrideDetails = Database.serverSettings.scannerPreferOpfMetadata
|
||||
for (const key in opfMetadata) {
|
||||
if (key === 'tags') { // Add tags only if tags are empty
|
||||
if (opfMetadata.tags.length && (!bookMetadata.tags.length || opfMetadataOverrideDetails)) {
|
||||
bookMetadata.tags = opfMetadata.tags
|
||||
}
|
||||
} else if (key === 'genres') { // Add genres only if genres are empty
|
||||
if (opfMetadata.genres.length && (!bookMetadata.genres.length || opfMetadataOverrideDetails)) {
|
||||
bookMetadata.genres = opfMetadata.genres
|
||||
}
|
||||
} else if (key === 'authors') {
|
||||
if (opfMetadata.authors?.length && (!bookMetadata.authors.length || opfMetadataOverrideDetails)) {
|
||||
bookMetadata.authors = opfMetadata.authors
|
||||
}
|
||||
} else if (key === 'narrators') {
|
||||
if (opfMetadata.narrators?.length && (!bookMetadata.narrators.length || opfMetadataOverrideDetails)) {
|
||||
bookMetadata.narrators = opfMetadata.narrators
|
||||
}
|
||||
} else if (key === 'series') {
|
||||
if (opfMetadata.series && (!bookMetadata.series.length || opfMetadataOverrideDetails)) {
|
||||
bookMetadata.series = [{
|
||||
name: opfMetadata.series,
|
||||
sequence: opfMetadata.sequence || null
|
||||
}]
|
||||
}
|
||||
} else if (opfMetadata[key] && (!bookMetadata[key] || opfMetadataOverrideDetails)) {
|
||||
bookMetadata[key] = opfMetadata[key]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// If metadata.json or metadata.abs use this for metadata
|
||||
const metadataLibraryFile = libraryItemData.metadataJsonLibraryFile || libraryItemData.metadataAbsLibraryFile
|
||||
let metadataText = metadataLibraryFile ? await readTextFile(metadataLibraryFile.metadata.path) : null
|
||||
let metadataFilePath = metadataLibraryFile?.metadata.path
|
||||
let metadataFileFormat = libraryItemData.metadataJsonLibraryFile ? 'json' : 'abs'
|
||||
|
||||
// When metadata file is not stored with library item then check in the /metadata/items folder for it
|
||||
if (!metadataText && existingLibraryItemId) {
|
||||
let metadataPath = Path.join(global.MetadataPath, 'items', existingLibraryItemId)
|
||||
|
||||
let altFormat = global.ServerSettings.metadataFileFormat === 'json' ? 'abs' : 'json'
|
||||
// First check the metadata format set in server settings, fallback to the alternate
|
||||
metadataFilePath = Path.join(metadataPath, `metadata.${global.ServerSettings.metadataFileFormat}`)
|
||||
metadataFileFormat = global.ServerSettings.metadataFileFormat
|
||||
if (await fsExtra.pathExists(metadataFilePath)) {
|
||||
metadataText = await readTextFile(metadataFilePath)
|
||||
} else if (await fsExtra.pathExists(Path.join(metadataPath, `metadata.${altFormat}`))) {
|
||||
metadataFilePath = Path.join(metadataPath, `metadata.${altFormat}`)
|
||||
metadataFileFormat = altFormat
|
||||
metadataText = await readTextFile(metadataFilePath)
|
||||
}
|
||||
}
|
||||
|
||||
if (metadataText) {
|
||||
libraryScan.addLog(LogLevel.INFO, `Found metadata file "${metadataFilePath}" - preferring`)
|
||||
let abMetadata = null
|
||||
if (metadataFileFormat === 'json') {
|
||||
abMetadata = abmetadataGenerator.parseJson(metadataText)
|
||||
const bookMetadataSourceHandler = new BookScanner.BookMetadataSourceHandler(bookMetadata, audioFiles, libraryItemData, libraryScan, existingLibraryItemId)
|
||||
const metadataPrecedence = librarySettings.metadataPrecedence || ['folderStructure', 'audioMetatags', 'txtFiles', 'opfFile', 'absMetadata']
|
||||
libraryScan.addLog(LogLevel.DEBUG, `"${bookMetadata.title}" Getting metadata with precedence [${metadataPrecedence.join(', ')}]`)
|
||||
for (const metadataSource of metadataPrecedence) {
|
||||
if (bookMetadataSourceHandler[metadataSource]) {
|
||||
await bookMetadataSourceHandler[metadataSource]()
|
||||
} else {
|
||||
abMetadata = abmetadataGenerator.parse(metadataText, 'book')
|
||||
libraryScan.addLog(LogLevel.ERROR, `Invalid metadata source "${metadataSource}"`)
|
||||
}
|
||||
|
||||
if (abMetadata) {
|
||||
if (abMetadata.tags?.length) {
|
||||
bookMetadata.tags = abMetadata.tags
|
||||
}
|
||||
if (abMetadata.chapters?.length) {
|
||||
bookMetadata.chapters = abMetadata.chapters
|
||||
}
|
||||
for (const key in abMetadata.metadata) {
|
||||
if (abMetadata.metadata[key] === undefined) continue
|
||||
bookMetadata[key] = abMetadata.metadata[key]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Set chapters from audio files if not already set
|
||||
if (!bookMetadata.chapters.length) {
|
||||
bookMetadata.chapters = this.getChaptersFromAudioFiles(bookMetadata.title, audioFiles, libraryScan)
|
||||
}
|
||||
|
||||
// Set cover from library file if one is found otherwise check audiofile
|
||||
|
|
@ -781,102 +614,76 @@ class BookScanner {
|
|||
return bookMetadata
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse a genre string into multiple genres
|
||||
* @example "Fantasy;Sci-Fi;History" => ["Fantasy", "Sci-Fi", "History"]
|
||||
* @param {string} genreTag
|
||||
* @returns {string[]}
|
||||
*/
|
||||
parseGenresString(genreTag) {
|
||||
if (!genreTag?.length) return []
|
||||
const separators = ['/', '//', ';']
|
||||
for (let i = 0; i < separators.length; i++) {
|
||||
if (genreTag.includes(separators[i])) {
|
||||
return genreTag.split(separators[i]).map(genre => genre.trim()).filter(g => !!g)
|
||||
}
|
||||
}
|
||||
return [genreTag]
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} bookTitle
|
||||
* @param {AudioFile[]} audioFiles
|
||||
* @param {LibraryScan} libraryScan
|
||||
* @returns {import('../models/Book').ChapterObject[]}
|
||||
*/
|
||||
getChaptersFromAudioFiles(bookTitle, audioFiles, libraryScan) {
|
||||
if (!audioFiles.length) return []
|
||||
|
||||
// If overdrive media markers are present and preferred, use those instead
|
||||
if (Database.serverSettings.scannerPreferOverdriveMediaMarker) {
|
||||
const overdriveChapters = parseOverdriveMediaMarkersAsChapters(audioFiles)
|
||||
if (overdriveChapters) {
|
||||
libraryScan.addLog(LogLevel.DEBUG, 'Overdrive Media Markers and preference found! Using these for chapter definitions')
|
||||
|
||||
return overdriveChapters
|
||||
}
|
||||
static BookMetadataSourceHandler = class {
|
||||
/**
|
||||
*
|
||||
* @param {Object} bookMetadata
|
||||
* @param {import('../models/Book').AudioFileObject[]} audioFiles
|
||||
* @param {import('./LibraryItemScanData')} libraryItemData
|
||||
* @param {LibraryScan} libraryScan
|
||||
* @param {string} existingLibraryItemId
|
||||
*/
|
||||
constructor(bookMetadata, audioFiles, libraryItemData, libraryScan, existingLibraryItemId) {
|
||||
this.bookMetadata = bookMetadata
|
||||
this.audioFiles = audioFiles
|
||||
this.libraryItemData = libraryItemData
|
||||
this.libraryScan = libraryScan
|
||||
this.existingLibraryItemId = existingLibraryItemId
|
||||
}
|
||||
|
||||
let chapters = []
|
||||
/**
|
||||
* Metadata parsed from folder names/structure
|
||||
*/
|
||||
folderStructure() {
|
||||
this.libraryItemData.setBookMetadataFromFilenames(this.bookMetadata)
|
||||
}
|
||||
|
||||
// If first audio file has embedded chapters then use embedded chapters
|
||||
if (audioFiles[0].chapters?.length) {
|
||||
// If all files chapters are the same, then only make chapters for the first file
|
||||
if (
|
||||
audioFiles.length === 1 ||
|
||||
audioFiles.length > 1 &&
|
||||
audioFiles[0].chapters.length === audioFiles[1].chapters?.length &&
|
||||
audioFiles[0].chapters.every((c, i) => c.title === audioFiles[1].chapters[i].title)
|
||||
) {
|
||||
libraryScan.addLog(LogLevel.DEBUG, `setChapters: Using embedded chapters in first audio file ${audioFiles[0].metadata?.path}`)
|
||||
chapters = audioFiles[0].chapters.map((c) => ({ ...c }))
|
||||
} else {
|
||||
libraryScan.addLog(LogLevel.DEBUG, `setChapters: Using embedded chapters from all audio files ${audioFiles[0].metadata?.path}`)
|
||||
let currChapterId = 0
|
||||
let currStartTime = 0
|
||||
/**
|
||||
* Metadata from audio file meta tags
|
||||
*/
|
||||
audioMetatags() {
|
||||
if (!this.audioFiles.length) return
|
||||
// Modifies bookMetadata with metadata mapped from audio file meta tags
|
||||
const bookTitle = this.bookMetadata.title || this.libraryItemData.mediaMetadata.title
|
||||
AudioFileScanner.setBookMetadataFromAudioMetaTags(bookTitle, this.audioFiles, this.bookMetadata, this.libraryScan)
|
||||
}
|
||||
|
||||
audioFiles.forEach((file) => {
|
||||
if (file.duration) {
|
||||
const afChapters = file.chapters?.map((c) => ({
|
||||
...c,
|
||||
id: c.id + currChapterId,
|
||||
start: c.start + currStartTime,
|
||||
end: c.end + currStartTime,
|
||||
})) ?? []
|
||||
chapters = chapters.concat(afChapters)
|
||||
|
||||
currChapterId += file.chapters?.length ?? 0
|
||||
currStartTime += file.duration
|
||||
}
|
||||
})
|
||||
return chapters
|
||||
/**
|
||||
* Description from desc.txt and narrator from reader.txt
|
||||
*/
|
||||
async txtFiles() {
|
||||
// If desc.txt in library item folder then use this for description
|
||||
if (this.libraryItemData.descTxtLibraryFile) {
|
||||
const description = await readTextFile(this.libraryItemData.descTxtLibraryFile.metadata.path)
|
||||
if (description.trim()) this.bookMetadata.description = description.trim()
|
||||
}
|
||||
} else if (audioFiles.length > 1) {
|
||||
const preferAudioMetadata = !!Database.serverSettings.scannerPreferAudioMetadata
|
||||
|
||||
// Build chapters from audio files
|
||||
let currChapterId = 0
|
||||
let currStartTime = 0
|
||||
audioFiles.forEach((file) => {
|
||||
if (file.duration) {
|
||||
let title = file.metadata.filename ? Path.basename(file.metadata.filename, Path.extname(file.metadata.filename)) : `Chapter ${currChapterId}`
|
||||
|
||||
// When prefer audio metadata server setting is set then use ID3 title tag as long as it is not the same as the book title
|
||||
if (preferAudioMetadata && file.metaTags?.tagTitle && file.metaTags?.tagTitle !== bookTitle) {
|
||||
title = file.metaTags.tagTitle
|
||||
}
|
||||
|
||||
chapters.push({
|
||||
id: currChapterId++,
|
||||
start: currStartTime,
|
||||
end: currStartTime + file.duration,
|
||||
title
|
||||
})
|
||||
currStartTime += file.duration
|
||||
// If reader.txt in library item folder then use this for narrator
|
||||
if (this.libraryItemData.readerTxtLibraryFile) {
|
||||
let narrator = await readTextFile(this.libraryItemData.readerTxtLibraryFile.metadata.path)
|
||||
narrator = narrator.split(/\r?\n/)[0]?.trim() || '' // Only use first line
|
||||
if (narrator) {
|
||||
this.bookMetadata.narrators = parseNameString.parse(narrator)?.names || []
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Metadata from opf file
|
||||
*/
|
||||
async opfFile() {
|
||||
if (!this.libraryItemData.metadataOpfLibraryFile) return
|
||||
await OpfFileScanner.scanBookOpfFile(this.libraryItemData.metadataOpfLibraryFile, this.bookMetadata)
|
||||
}
|
||||
|
||||
/**
|
||||
* Metadata from metadata.json
|
||||
*/
|
||||
async absMetadata() {
|
||||
// If metadata.json use this for metadata
|
||||
await AbsMetadataFileScanner.scanBookMetadataFile(this.libraryScan, this.libraryItemData, this.bookMetadata, this.existingLibraryItemId)
|
||||
}
|
||||
return chapters
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
@ -896,121 +703,66 @@ class BookScanner {
|
|||
await fsExtra.ensureDir(metadataPath)
|
||||
}
|
||||
|
||||
const metadataFileFormat = global.ServerSettings.metadataFileFormat
|
||||
const metadataFilePath = Path.join(metadataPath, `metadata.${metadataFileFormat}`)
|
||||
if (metadataFileFormat === 'json') {
|
||||
// Remove metadata.abs if it exists
|
||||
if (await fsExtra.pathExists(Path.join(metadataPath, `metadata.abs`))) {
|
||||
libraryScan.addLog(LogLevel.DEBUG, `Removing metadata.abs for item "${libraryItem.media.title}"`)
|
||||
await fsExtra.remove(Path.join(metadataPath, `metadata.abs`))
|
||||
libraryItem.libraryFiles = libraryItem.libraryFiles.filter(lf => lf.metadata.path !== filePathToPOSIX(Path.join(metadataPath, `metadata.abs`)))
|
||||
}
|
||||
const metadataFilePath = Path.join(metadataPath, `metadata.${global.ServerSettings.metadataFileFormat}`)
|
||||
|
||||
// TODO: Update to not use `metadata` so it fits the updated model
|
||||
const jsonObject = {
|
||||
tags: libraryItem.media.tags || [],
|
||||
chapters: libraryItem.media.chapters?.map(c => ({ ...c })) || [],
|
||||
metadata: {
|
||||
title: libraryItem.media.title,
|
||||
subtitle: libraryItem.media.subtitle,
|
||||
authors: libraryItem.media.authors.map(a => a.name),
|
||||
narrators: libraryItem.media.narrators,
|
||||
series: libraryItem.media.series.map(se => {
|
||||
const sequence = se.bookSeries?.sequence || ''
|
||||
if (!sequence) return se.name
|
||||
return `${se.name} #${sequence}`
|
||||
}),
|
||||
genres: libraryItem.media.genres || [],
|
||||
publishedYear: libraryItem.media.publishedYear,
|
||||
publishedDate: libraryItem.media.publishedDate,
|
||||
publisher: libraryItem.media.publisher,
|
||||
description: libraryItem.media.description,
|
||||
isbn: libraryItem.media.isbn,
|
||||
asin: libraryItem.media.asin,
|
||||
language: libraryItem.media.language,
|
||||
explicit: !!libraryItem.media.explicit,
|
||||
abridged: !!libraryItem.media.abridged
|
||||
}
|
||||
}
|
||||
return fsExtra.writeFile(metadataFilePath, JSON.stringify(jsonObject, null, 2)).then(async () => {
|
||||
// Add metadata.json to libraryFiles array if it is new
|
||||
let metadataLibraryFile = libraryItem.libraryFiles.find(lf => lf.metadata.path === filePathToPOSIX(metadataFilePath))
|
||||
if (storeMetadataWithItem) {
|
||||
if (!metadataLibraryFile) {
|
||||
const newLibraryFile = new LibraryFile()
|
||||
await newLibraryFile.setDataFromPath(metadataFilePath, `metadata.json`)
|
||||
metadataLibraryFile = newLibraryFile.toJSON()
|
||||
libraryItem.libraryFiles.push(metadataLibraryFile)
|
||||
} else {
|
||||
const fileTimestamps = await getFileTimestampsWithIno(metadataFilePath)
|
||||
if (fileTimestamps) {
|
||||
metadataLibraryFile.metadata.mtimeMs = fileTimestamps.mtimeMs
|
||||
metadataLibraryFile.metadata.ctimeMs = fileTimestamps.ctimeMs
|
||||
metadataLibraryFile.metadata.size = fileTimestamps.size
|
||||
metadataLibraryFile.ino = fileTimestamps.ino
|
||||
}
|
||||
}
|
||||
const libraryItemDirTimestamps = await getFileTimestampsWithIno(libraryItem.path)
|
||||
if (libraryItemDirTimestamps) {
|
||||
libraryItem.mtime = libraryItemDirTimestamps.mtimeMs
|
||||
libraryItem.ctime = libraryItemDirTimestamps.ctimeMs
|
||||
let size = 0
|
||||
libraryItem.libraryFiles.forEach((lf) => size += (!isNaN(lf.metadata.size) ? Number(lf.metadata.size) : 0))
|
||||
libraryItem.size = size
|
||||
}
|
||||
}
|
||||
|
||||
libraryScan.addLog(LogLevel.DEBUG, `Success saving abmetadata to "${metadataFilePath}"`)
|
||||
|
||||
return metadataLibraryFile
|
||||
}).catch((error) => {
|
||||
libraryScan.addLog(LogLevel.ERROR, `Failed to save json file at "${metadataFilePath}"`, error)
|
||||
return null
|
||||
})
|
||||
} else {
|
||||
// Remove metadata.json if it exists
|
||||
if (await fsExtra.pathExists(Path.join(metadataPath, `metadata.json`))) {
|
||||
libraryScan.addLog(LogLevel.DEBUG, `Removing metadata.json for item "${libraryItem.media.title}"`)
|
||||
await fsExtra.remove(Path.join(metadataPath, `metadata.json`))
|
||||
libraryItem.libraryFiles = libraryItem.libraryFiles.filter(lf => lf.metadata.path !== filePathToPOSIX(Path.join(metadataPath, `metadata.json`)))
|
||||
}
|
||||
|
||||
return abmetadataGenerator.generateFromNewModel(libraryItem, metadataFilePath).then(async (success) => {
|
||||
if (!success) {
|
||||
libraryScan.addLog(LogLevel.ERROR, `Failed saving abmetadata to "${metadataFilePath}"`)
|
||||
return null
|
||||
}
|
||||
// Add metadata.abs to libraryFiles array if it is new
|
||||
let metadataLibraryFile = libraryItem.libraryFiles.find(lf => lf.metadata.path === filePathToPOSIX(metadataFilePath))
|
||||
if (storeMetadataWithItem) {
|
||||
if (!metadataLibraryFile) {
|
||||
const newLibraryFile = new LibraryFile()
|
||||
await newLibraryFile.setDataFromPath(metadataFilePath, `metadata.abs`)
|
||||
metadataLibraryFile = newLibraryFile.toJSON()
|
||||
libraryItem.libraryFiles.push(metadataLibraryFile)
|
||||
} else {
|
||||
const fileTimestamps = await getFileTimestampsWithIno(metadataFilePath)
|
||||
if (fileTimestamps) {
|
||||
metadataLibraryFile.metadata.mtimeMs = fileTimestamps.mtimeMs
|
||||
metadataLibraryFile.metadata.ctimeMs = fileTimestamps.ctimeMs
|
||||
metadataLibraryFile.metadata.size = fileTimestamps.size
|
||||
metadataLibraryFile.ino = fileTimestamps.ino
|
||||
}
|
||||
}
|
||||
const libraryItemDirTimestamps = await getFileTimestampsWithIno(libraryItem.path)
|
||||
if (libraryItemDirTimestamps) {
|
||||
libraryItem.mtime = libraryItemDirTimestamps.mtimeMs
|
||||
libraryItem.ctime = libraryItemDirTimestamps.ctimeMs
|
||||
let size = 0
|
||||
libraryItem.libraryFiles.forEach((lf) => size += (!isNaN(lf.metadata.size) ? Number(lf.metadata.size) : 0))
|
||||
libraryItem.size = size
|
||||
}
|
||||
}
|
||||
|
||||
libraryScan.addLog(LogLevel.DEBUG, `Success saving abmetadata to "${metadataFilePath}"`)
|
||||
return metadataLibraryFile
|
||||
})
|
||||
const jsonObject = {
|
||||
tags: libraryItem.media.tags || [],
|
||||
chapters: libraryItem.media.chapters?.map(c => ({ ...c })) || [],
|
||||
title: libraryItem.media.title,
|
||||
subtitle: libraryItem.media.subtitle,
|
||||
authors: libraryItem.media.authors.map(a => a.name),
|
||||
narrators: libraryItem.media.narrators,
|
||||
series: libraryItem.media.series.map(se => {
|
||||
const sequence = se.bookSeries?.sequence || ''
|
||||
if (!sequence) return se.name
|
||||
return `${se.name} #${sequence}`
|
||||
}),
|
||||
genres: libraryItem.media.genres || [],
|
||||
publishedYear: libraryItem.media.publishedYear,
|
||||
publishedDate: libraryItem.media.publishedDate,
|
||||
publisher: libraryItem.media.publisher,
|
||||
description: libraryItem.media.description,
|
||||
isbn: libraryItem.media.isbn,
|
||||
asin: libraryItem.media.asin,
|
||||
language: libraryItem.media.language,
|
||||
explicit: !!libraryItem.media.explicit,
|
||||
abridged: !!libraryItem.media.abridged
|
||||
}
|
||||
return fsExtra.writeFile(metadataFilePath, JSON.stringify(jsonObject, null, 2)).then(async () => {
|
||||
// Add metadata.json to libraryFiles array if it is new
|
||||
let metadataLibraryFile = libraryItem.libraryFiles.find(lf => lf.metadata.path === filePathToPOSIX(metadataFilePath))
|
||||
if (storeMetadataWithItem) {
|
||||
if (!metadataLibraryFile) {
|
||||
const newLibraryFile = new LibraryFile()
|
||||
await newLibraryFile.setDataFromPath(metadataFilePath, `metadata.json`)
|
||||
metadataLibraryFile = newLibraryFile.toJSON()
|
||||
libraryItem.libraryFiles.push(metadataLibraryFile)
|
||||
} else {
|
||||
const fileTimestamps = await getFileTimestampsWithIno(metadataFilePath)
|
||||
if (fileTimestamps) {
|
||||
metadataLibraryFile.metadata.mtimeMs = fileTimestamps.mtimeMs
|
||||
metadataLibraryFile.metadata.ctimeMs = fileTimestamps.ctimeMs
|
||||
metadataLibraryFile.metadata.size = fileTimestamps.size
|
||||
metadataLibraryFile.ino = fileTimestamps.ino
|
||||
}
|
||||
}
|
||||
const libraryItemDirTimestamps = await getFileTimestampsWithIno(libraryItem.path)
|
||||
if (libraryItemDirTimestamps) {
|
||||
libraryItem.mtime = libraryItemDirTimestamps.mtimeMs
|
||||
libraryItem.ctime = libraryItemDirTimestamps.ctimeMs
|
||||
let size = 0
|
||||
libraryItem.libraryFiles.forEach((lf) => size += (!isNaN(lf.metadata.size) ? Number(lf.metadata.size) : 0))
|
||||
libraryItem.size = size
|
||||
}
|
||||
}
|
||||
|
||||
libraryScan.addLog(LogLevel.DEBUG, `Success saving abmetadata to "${metadataFilePath}"`)
|
||||
|
||||
return metadataLibraryFile
|
||||
}).catch((error) => {
|
||||
libraryScan.addLog(LogLevel.ERROR, `Failed to save json file at "${metadataFilePath}"`, error)
|
||||
return null
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
|
|||
|
|
@ -25,7 +25,7 @@ class LibraryItemScanData {
|
|||
this.relPath = data.relPath
|
||||
/** @type {boolean} */
|
||||
this.isFile = data.isFile
|
||||
/** @type {{author:string, title:string, subtitle:string, series:string, sequence:string, publishedYear:string, narrators:string}} */
|
||||
/** @type {import('../utils/scandir').LibraryItemFilenameMetadata} */
|
||||
this.mediaMetadata = data.mediaMetadata
|
||||
/** @type {import('../objects/files/LibraryFile')[]} */
|
||||
this.libraryFiles = data.libraryFiles
|
||||
|
|
@ -233,10 +233,9 @@ class LibraryItemScanData {
|
|||
}
|
||||
await existingLibraryItem.save()
|
||||
return true
|
||||
} else {
|
||||
libraryScan.addLog(LogLevel.DEBUG, `Library item "${existingLibraryItem.relPath}" is up-to-date`)
|
||||
return false
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
@ -303,5 +302,34 @@ class LibraryItemScanData {
|
|||
|
||||
return !this.ebookLibraryFiles.some(lf => lf.ino === ebookFile.ino)
|
||||
}
|
||||
|
||||
/**
|
||||
* Set data parsed from filenames
|
||||
*
|
||||
* @param {Object} bookMetadata
|
||||
*/
|
||||
setBookMetadataFromFilenames(bookMetadata) {
|
||||
const keysToMap = ['title', 'subtitle', 'publishedYear', 'asin']
|
||||
for (const key in this.mediaMetadata) {
|
||||
if (keysToMap.includes(key) && this.mediaMetadata[key]) {
|
||||
bookMetadata[key] = this.mediaMetadata[key]
|
||||
}
|
||||
}
|
||||
|
||||
if (this.mediaMetadata.authors?.length) {
|
||||
bookMetadata.authors = this.mediaMetadata.authors
|
||||
}
|
||||
if (this.mediaMetadata.narrators?.length) {
|
||||
bookMetadata.narrators = this.mediaMetadata.narrators
|
||||
}
|
||||
if (this.mediaMetadata.seriesName) {
|
||||
bookMetadata.series = [
|
||||
{
|
||||
name: this.mediaMetadata.seriesName,
|
||||
sequence: this.mediaMetadata.seriesSequence || null
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
module.exports = LibraryItemScanData
|
||||
|
|
@ -21,9 +21,10 @@ class LibraryItemScanner {
|
|||
* Scan single library item
|
||||
*
|
||||
* @param {string} libraryItemId
|
||||
* @param {{relPath:string, path:string}} [renamedPaths] used by watcher when item folder was renamed
|
||||
* @returns {number} ScanResult
|
||||
*/
|
||||
async scanLibraryItem(libraryItemId) {
|
||||
async scanLibraryItem(libraryItemId, renamedPaths = null) {
|
||||
// TODO: Add task manager
|
||||
const libraryItem = await Database.libraryItemModel.findByPk(libraryItemId)
|
||||
if (!libraryItem) {
|
||||
|
|
@ -50,27 +51,25 @@ class LibraryItemScanner {
|
|||
|
||||
const scanLogger = new ScanLogger()
|
||||
scanLogger.verbose = true
|
||||
scanLogger.setData('libraryItem', libraryItemId)
|
||||
scanLogger.setData('libraryItem', renamedPaths?.relPath || libraryItem.relPath)
|
||||
|
||||
const libraryItemPath = fileUtils.filePathToPOSIX(libraryItem.path)
|
||||
const libraryItemPath = renamedPaths?.path || fileUtils.filePathToPOSIX(libraryItem.path)
|
||||
const folder = library.libraryFolders[0]
|
||||
const libraryItemScanData = await this.getLibraryItemScanData(libraryItemPath, library, folder, false)
|
||||
|
||||
if (await libraryItemScanData.checkLibraryItemData(libraryItem, scanLogger)) {
|
||||
if (libraryItemScanData.hasLibraryFileChanges || libraryItemScanData.hasPathChange) {
|
||||
const expandedLibraryItem = await this.rescanLibraryItem(libraryItem, libraryItemScanData, library.settings, scanLogger)
|
||||
const oldLibraryItem = Database.libraryItemModel.getOldLibraryItem(expandedLibraryItem)
|
||||
SocketAuthority.emitter('item_updated', oldLibraryItem.toJSONExpanded())
|
||||
let libraryItemDataUpdated = await libraryItemScanData.checkLibraryItemData(libraryItem, scanLogger)
|
||||
|
||||
await this.checkAuthorsAndSeriesRemovedFromBooks(library.id, scanLogger)
|
||||
} else {
|
||||
// TODO: Temporary while using old model to socket emit
|
||||
const oldLibraryItem = await Database.libraryItemModel.getOldById(libraryItem.id)
|
||||
SocketAuthority.emitter('item_updated', oldLibraryItem.toJSONExpanded())
|
||||
}
|
||||
const { libraryItem: expandedLibraryItem, wasUpdated } = await this.rescanLibraryItemMedia(libraryItem, libraryItemScanData, library.settings, scanLogger)
|
||||
if (libraryItemDataUpdated || wasUpdated) {
|
||||
const oldLibraryItem = Database.libraryItemModel.getOldLibraryItem(expandedLibraryItem)
|
||||
SocketAuthority.emitter('item_updated', oldLibraryItem.toJSONExpanded())
|
||||
|
||||
await this.checkAuthorsAndSeriesRemovedFromBooks(library.id, scanLogger)
|
||||
|
||||
return ScanResult.UPDATED
|
||||
}
|
||||
|
||||
scanLogger.addLog(LogLevel.DEBUG, `Library item is up-to-date`)
|
||||
return ScanResult.UPTODATE
|
||||
}
|
||||
|
||||
|
|
@ -156,16 +155,14 @@ class LibraryItemScanner {
|
|||
* @param {LibraryItemScanData} libraryItemData
|
||||
* @param {import('../models/Library').LibrarySettingsObject} librarySettings
|
||||
* @param {LibraryScan} libraryScan
|
||||
* @returns {Promise<LibraryItem>}
|
||||
* @returns {Promise<{libraryItem:LibraryItem, wasUpdated:boolean}>}
|
||||
*/
|
||||
async rescanLibraryItem(existingLibraryItem, libraryItemData, librarySettings, libraryScan) {
|
||||
let newLibraryItem = null
|
||||
rescanLibraryItemMedia(existingLibraryItem, libraryItemData, librarySettings, libraryScan) {
|
||||
if (existingLibraryItem.mediaType === 'book') {
|
||||
newLibraryItem = await BookScanner.rescanExistingBookLibraryItem(existingLibraryItem, libraryItemData, librarySettings, libraryScan)
|
||||
return BookScanner.rescanExistingBookLibraryItem(existingLibraryItem, libraryItemData, librarySettings, libraryScan)
|
||||
} else {
|
||||
newLibraryItem = await PodcastScanner.rescanExistingPodcastLibraryItem(existingLibraryItem, libraryItemData, librarySettings, libraryScan)
|
||||
return PodcastScanner.rescanExistingPodcastLibraryItem(existingLibraryItem, libraryItemData, librarySettings, libraryScan)
|
||||
}
|
||||
return newLibraryItem
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
|
|||
|
|
@ -6,7 +6,7 @@ const date = require('../libs/dateAndTime')
|
|||
const Logger = require('../Logger')
|
||||
const Library = require('../objects/Library')
|
||||
const { LogLevel } = require('../utils/constants')
|
||||
const { secondsToTimestamp } = require('../utils/index')
|
||||
const { secondsToTimestamp, elapsedPretty } = require('../utils/index')
|
||||
|
||||
class LibraryScan {
|
||||
constructor() {
|
||||
|
|
@ -67,6 +67,15 @@ class LibraryScan {
|
|||
get logFilename() {
|
||||
return date.format(new Date(), 'YYYY-MM-DD') + '_' + this.id + '.txt'
|
||||
}
|
||||
get scanResultsString() {
|
||||
if (this.error) return this.error
|
||||
const strs = []
|
||||
if (this.resultsAdded) strs.push(`${this.resultsAdded} added`)
|
||||
if (this.resultsUpdated) strs.push(`${this.resultsUpdated} updated`)
|
||||
if (this.resultsMissing) strs.push(`${this.resultsMissing} missing`)
|
||||
if (!strs.length) return `Everything was up to date (${elapsedPretty(this.elapsed / 1000)})`
|
||||
return strs.join(', ') + ` (${elapsedPretty(this.elapsed / 1000)})`
|
||||
}
|
||||
|
||||
toJSON() {
|
||||
return {
|
||||
|
|
|
|||
|
|
@ -9,9 +9,11 @@ const fileUtils = require('../utils/fileUtils')
|
|||
const scanUtils = require('../utils/scandir')
|
||||
const { LogLevel, ScanResult } = require('../utils/constants')
|
||||
const libraryFilters = require('../utils/queries/libraryFilters')
|
||||
const TaskManager = require('../managers/TaskManager')
|
||||
const LibraryItemScanner = require('./LibraryItemScanner')
|
||||
const LibraryScan = require('./LibraryScan')
|
||||
const LibraryItemScanData = require('./LibraryItemScanData')
|
||||
const Task = require('../objects/Task')
|
||||
|
||||
class LibraryScanner {
|
||||
constructor() {
|
||||
|
|
@ -19,7 +21,7 @@ class LibraryScanner {
|
|||
this.librariesScanning = []
|
||||
|
||||
this.scanningFilesChanged = false
|
||||
/** @type {import('../Watcher').PendingFileUpdate[][]} */
|
||||
/** @type {[import('../Watcher').PendingFileUpdate[], Task][]} */
|
||||
this.pendingFileUpdatesToScan = []
|
||||
}
|
||||
|
||||
|
|
@ -44,48 +46,69 @@ class LibraryScanner {
|
|||
/**
|
||||
*
|
||||
* @param {import('../objects/Library')} library
|
||||
* @param {*} options
|
||||
* @param {boolean} [forceRescan]
|
||||
*/
|
||||
async scan(library, options = {}) {
|
||||
async scan(library, forceRescan = false) {
|
||||
if (this.isLibraryScanning(library.id)) {
|
||||
Logger.error(`[Scanner] Already scanning ${library.id}`)
|
||||
Logger.error(`[LibraryScanner] Already scanning ${library.id}`)
|
||||
return
|
||||
}
|
||||
|
||||
if (!library.folders.length) {
|
||||
Logger.warn(`[Scanner] Library has no folders to scan "${library.name}"`)
|
||||
Logger.warn(`[LibraryScanner] Library has no folders to scan "${library.name}"`)
|
||||
return
|
||||
}
|
||||
|
||||
if (library.isBook && library.settings.metadataPrecedence.join() !== library.lastScanMetadataPrecedence?.join()) {
|
||||
const lastScanMetadataPrecedence = library.lastScanMetadataPrecedence?.join() || 'Unset'
|
||||
Logger.info(`[LibraryScanner] Library metadata precedence changed since last scan. From [${lastScanMetadataPrecedence}] to [${library.settings.metadataPrecedence.join()}]`)
|
||||
forceRescan = true
|
||||
}
|
||||
|
||||
const libraryScan = new LibraryScan()
|
||||
libraryScan.setData(library)
|
||||
libraryScan.verbose = true
|
||||
this.librariesScanning.push(libraryScan.getScanEmitData)
|
||||
|
||||
SocketAuthority.emitter('scan_start', libraryScan.getScanEmitData)
|
||||
const taskData = {
|
||||
libraryId: library.id,
|
||||
libraryName: library.name,
|
||||
libraryMediaType: library.mediaType
|
||||
}
|
||||
const task = TaskManager.createAndAddTask('library-scan', `Scanning "${library.name}" library`, null, true, taskData)
|
||||
|
||||
Logger.info(`[Scanner] Starting library scan ${libraryScan.id} for ${libraryScan.libraryName}`)
|
||||
Logger.info(`[LibraryScanner] Starting${forceRescan ? ' (forced)' : ''} library scan ${libraryScan.id} for ${libraryScan.libraryName}`)
|
||||
|
||||
const canceled = await this.scanLibrary(libraryScan)
|
||||
const canceled = await this.scanLibrary(libraryScan, forceRescan)
|
||||
|
||||
if (canceled) {
|
||||
Logger.info(`[Scanner] Library scan canceled for "${libraryScan.libraryName}"`)
|
||||
Logger.info(`[LibraryScanner] Library scan canceled for "${libraryScan.libraryName}"`)
|
||||
delete this.cancelLibraryScan[libraryScan.libraryId]
|
||||
}
|
||||
|
||||
libraryScan.setComplete()
|
||||
|
||||
Logger.info(`[Scanner] Library scan ${libraryScan.id} completed in ${libraryScan.elapsedTimestamp} | ${libraryScan.resultStats}`)
|
||||
Logger.info(`[LibraryScanner] Library scan ${libraryScan.id} completed in ${libraryScan.elapsedTimestamp} | ${libraryScan.resultStats}`)
|
||||
this.librariesScanning = this.librariesScanning.filter(ls => ls.id !== library.id)
|
||||
|
||||
if (canceled && !libraryScan.totalResults) {
|
||||
task.setFinished('Scan canceled')
|
||||
TaskManager.taskFinished(task)
|
||||
|
||||
const emitData = libraryScan.getScanEmitData
|
||||
emitData.results = null
|
||||
SocketAuthority.emitter('scan_complete', emitData)
|
||||
return
|
||||
}
|
||||
|
||||
SocketAuthority.emitter('scan_complete', libraryScan.getScanEmitData)
|
||||
library.lastScan = Date.now()
|
||||
library.lastScanVersion = packageJson.version
|
||||
if (library.isBook) {
|
||||
library.lastScanMetadataPrecedence = library.settings.metadataPrecedence
|
||||
}
|
||||
await Database.libraryModel.updateFromOld(library)
|
||||
|
||||
task.setFinished(libraryScan.scanResultsString)
|
||||
TaskManager.taskFinished(task)
|
||||
|
||||
if (libraryScan.totalResults) {
|
||||
libraryScan.saveLog()
|
||||
|
|
@ -95,9 +118,10 @@ class LibraryScanner {
|
|||
/**
|
||||
*
|
||||
* @param {import('./LibraryScan')} libraryScan
|
||||
* @returns {boolean} true if scan canceled
|
||||
* @param {boolean} forceRescan
|
||||
* @returns {Promise<boolean>} true if scan canceled
|
||||
*/
|
||||
async scanLibrary(libraryScan) {
|
||||
async scanLibrary(libraryScan, forceRescan) {
|
||||
// Make sure library filter data is set
|
||||
// this is used to check for existing authors & series
|
||||
await libraryFilters.getFilterData(libraryScan.library.mediaType, libraryScan.libraryId)
|
||||
|
|
@ -155,17 +179,25 @@ class LibraryScanner {
|
|||
}
|
||||
} else {
|
||||
libraryItemDataFound = libraryItemDataFound.filter(lidf => lidf !== libraryItemData)
|
||||
if (await libraryItemData.checkLibraryItemData(existingLibraryItem, libraryScan)) {
|
||||
libraryScan.resultsUpdated++
|
||||
if (libraryItemData.hasLibraryFileChanges || libraryItemData.hasPathChange) {
|
||||
const libraryItem = await LibraryItemScanner.rescanLibraryItem(existingLibraryItem, libraryItemData, libraryScan.library.settings, libraryScan)
|
||||
const oldLibraryItem = Database.libraryItemModel.getOldLibraryItem(libraryItem)
|
||||
oldLibraryItemsUpdated.push(oldLibraryItem)
|
||||
let libraryItemDataUpdated = await libraryItemData.checkLibraryItemData(existingLibraryItem, libraryScan)
|
||||
if (libraryItemDataUpdated || forceRescan) {
|
||||
if (forceRescan || libraryItemData.hasLibraryFileChanges || libraryItemData.hasPathChange) {
|
||||
const { libraryItem, wasUpdated } = await LibraryItemScanner.rescanLibraryItemMedia(existingLibraryItem, libraryItemData, libraryScan.library.settings, libraryScan)
|
||||
if (!forceRescan || wasUpdated) {
|
||||
libraryScan.resultsUpdated++
|
||||
const oldLibraryItem = Database.libraryItemModel.getOldLibraryItem(libraryItem)
|
||||
oldLibraryItemsUpdated.push(oldLibraryItem)
|
||||
} else {
|
||||
libraryScan.addLog(LogLevel.DEBUG, `Library item "${existingLibraryItem.relPath}" is up-to-date`)
|
||||
}
|
||||
} else {
|
||||
libraryScan.resultsUpdated++
|
||||
// TODO: Temporary while using old model to socket emit
|
||||
const oldLibraryItem = await Database.libraryItemModel.getOldById(existingLibraryItem.id)
|
||||
oldLibraryItemsUpdated.push(oldLibraryItem)
|
||||
}
|
||||
} else {
|
||||
libraryScan.addLog(LogLevel.DEBUG, `Library item "${existingLibraryItem.relPath}" is up-to-date`)
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -304,18 +336,25 @@ class LibraryScanner {
|
|||
/**
|
||||
* Scan files changed from Watcher
|
||||
* @param {import('../Watcher').PendingFileUpdate[]} fileUpdates
|
||||
* @param {Task} pendingTask
|
||||
*/
|
||||
async scanFilesChanged(fileUpdates) {
|
||||
async scanFilesChanged(fileUpdates, pendingTask) {
|
||||
if (!fileUpdates?.length) return
|
||||
|
||||
// If already scanning files from watcher then add these updates to queue
|
||||
if (this.scanningFilesChanged) {
|
||||
this.pendingFileUpdatesToScan.push(fileUpdates)
|
||||
this.pendingFileUpdatesToScan.push([fileUpdates, pendingTask])
|
||||
Logger.debug(`[LibraryScanner] Already scanning files from watcher - file updates pushed to queue (size ${this.pendingFileUpdatesToScan.length})`)
|
||||
return
|
||||
}
|
||||
this.scanningFilesChanged = true
|
||||
|
||||
const results = {
|
||||
added: 0,
|
||||
updated: 0,
|
||||
removed: 0
|
||||
}
|
||||
|
||||
// files grouped by folder
|
||||
const folderGroups = this.getFileUpdatesGrouped(fileUpdates)
|
||||
|
||||
|
|
@ -346,17 +385,42 @@ class LibraryScanner {
|
|||
const folderScanResults = await this.scanFolderUpdates(library, folder, fileUpdateGroup)
|
||||
Logger.debug(`[LibraryScanner] Folder scan results`, folderScanResults)
|
||||
|
||||
// Tally results to share with client
|
||||
let resetFilterData = false
|
||||
Object.values(folderScanResults).forEach((scanResult) => {
|
||||
if (scanResult === ScanResult.ADDED) {
|
||||
resetFilterData = true
|
||||
results.added++
|
||||
} else if (scanResult === ScanResult.REMOVED) {
|
||||
resetFilterData = true
|
||||
results.removed++
|
||||
} else if (scanResult === ScanResult.UPDATED) {
|
||||
resetFilterData = true
|
||||
results.updated++
|
||||
}
|
||||
})
|
||||
|
||||
// If something was updated then reset numIssues filter data for library
|
||||
if (Object.values(folderScanResults).some(scanResult => scanResult !== ScanResult.NOTHING && scanResult !== ScanResult.UPTODATE)) {
|
||||
if (resetFilterData) {
|
||||
await Database.resetLibraryIssuesFilterData(libraryId)
|
||||
}
|
||||
}
|
||||
|
||||
// Complete task and send results to client
|
||||
const resultStrs = []
|
||||
if (results.added) resultStrs.push(`${results.added} added`)
|
||||
if (results.updated) resultStrs.push(`${results.updated} updated`)
|
||||
if (results.removed) resultStrs.push(`${results.removed} missing`)
|
||||
let scanResultStr = 'Scan finished with no changes'
|
||||
if (resultStrs.length) scanResultStr = resultStrs.join(', ')
|
||||
pendingTask.setFinished(scanResultStr)
|
||||
TaskManager.taskFinished(pendingTask)
|
||||
|
||||
this.scanningFilesChanged = false
|
||||
|
||||
if (this.pendingFileUpdatesToScan.length) {
|
||||
Logger.debug(`[LibraryScanner] File updates finished scanning with more updates in queue (${this.pendingFileUpdatesToScan.length})`)
|
||||
this.scanFilesChanged(this.pendingFileUpdatesToScan.shift())
|
||||
this.scanFilesChanged(...this.pendingFileUpdatesToScan.shift())
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -461,6 +525,7 @@ class LibraryScanner {
|
|||
path: potentialChildDirs
|
||||
})
|
||||
|
||||
let renamedPaths = {}
|
||||
if (!existingLibraryItem) {
|
||||
const dirIno = await fileUtils.getIno(fullPath)
|
||||
existingLibraryItem = await Database.libraryItemModel.findOneOld({
|
||||
|
|
@ -471,6 +536,8 @@ class LibraryScanner {
|
|||
// Update library item paths for scan
|
||||
existingLibraryItem.path = fullPath
|
||||
existingLibraryItem.relPath = itemDir
|
||||
renamedPaths.path = fullPath
|
||||
renamedPaths.relPath = itemDir
|
||||
}
|
||||
}
|
||||
if (existingLibraryItem) {
|
||||
|
|
@ -490,7 +557,7 @@ class LibraryScanner {
|
|||
|
||||
// Scan library item for updates
|
||||
Logger.debug(`[LibraryScanner] Folder update for relative path "${itemDir}" is in library item "${existingLibraryItem.media.metadata.title}" - scan for updates`)
|
||||
itemGroupingResults[itemDir] = await LibraryItemScanner.scanLibraryItem(existingLibraryItem.id)
|
||||
itemGroupingResults[itemDir] = await LibraryItemScanner.scanLibraryItem(existingLibraryItem.id, renamedPaths)
|
||||
continue
|
||||
} else if (library.settings.audiobooksOnly && !fileUpdateGroup[itemDir].some?.(scanUtils.checkFilepathIsAudioFile)) {
|
||||
Logger.debug(`[LibraryScanner] Folder update for relative path "${itemDir}" has no audio files`)
|
||||
|
|
|
|||
48
server/scanner/OpfFileScanner.js
Normal file
48
server/scanner/OpfFileScanner.js
Normal file
|
|
@ -0,0 +1,48 @@
|
|||
const { parseOpfMetadataXML } = require('../utils/parsers/parseOpfMetadata')
|
||||
const { readTextFile } = require('../utils/fileUtils')
|
||||
|
||||
class OpfFileScanner {
|
||||
constructor() { }
|
||||
|
||||
/**
|
||||
* Parse metadata from .opf file found in library scan and update bookMetadata
|
||||
*
|
||||
* @param {import('../models/LibraryItem').LibraryFileObject} opfLibraryFileObj
|
||||
* @param {Object} bookMetadata
|
||||
*/
|
||||
async scanBookOpfFile(opfLibraryFileObj, bookMetadata) {
|
||||
const xmlText = await readTextFile(opfLibraryFileObj.metadata.path)
|
||||
const opfMetadata = xmlText ? await parseOpfMetadataXML(xmlText) : null
|
||||
if (opfMetadata) {
|
||||
for (const key in opfMetadata) {
|
||||
if (key === 'tags') { // Add tags only if tags are empty
|
||||
if (opfMetadata.tags.length) {
|
||||
bookMetadata.tags = opfMetadata.tags
|
||||
}
|
||||
} else if (key === 'genres') { // Add genres only if genres are empty
|
||||
if (opfMetadata.genres.length) {
|
||||
bookMetadata.genres = opfMetadata.genres
|
||||
}
|
||||
} else if (key === 'authors') {
|
||||
if (opfMetadata.authors?.length) {
|
||||
bookMetadata.authors = opfMetadata.authors
|
||||
}
|
||||
} else if (key === 'narrators') {
|
||||
if (opfMetadata.narrators?.length) {
|
||||
bookMetadata.narrators = opfMetadata.narrators
|
||||
}
|
||||
} else if (key === 'series') {
|
||||
if (opfMetadata.series) {
|
||||
bookMetadata.series = [{
|
||||
name: opfMetadata.series,
|
||||
sequence: opfMetadata.sequence || null
|
||||
}]
|
||||
}
|
||||
} else if (opfMetadata[key] && key !== 'sequence') {
|
||||
bookMetadata[key] = opfMetadata[key]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
module.exports = new OpfFileScanner()
|
||||
|
|
@ -5,12 +5,13 @@ const { getTitleIgnorePrefix } = require('../utils/index')
|
|||
const abmetadataGenerator = require('../utils/generators/abmetadataGenerator')
|
||||
const AudioFileScanner = require('./AudioFileScanner')
|
||||
const Database = require('../Database')
|
||||
const { readTextFile, filePathToPOSIX, getFileTimestampsWithIno } = require('../utils/fileUtils')
|
||||
const { filePathToPOSIX, getFileTimestampsWithIno } = require('../utils/fileUtils')
|
||||
const AudioFile = require('../objects/files/AudioFile')
|
||||
const CoverManager = require('../managers/CoverManager')
|
||||
const LibraryFile = require('../objects/files/LibraryFile')
|
||||
const fsExtra = require("../libs/fsExtra")
|
||||
const PodcastEpisode = require("../models/PodcastEpisode")
|
||||
const AbsMetadataFileScanner = require("./AbsMetadataFileScanner")
|
||||
|
||||
/**
|
||||
* Metadata for podcasts pulled from files
|
||||
|
|
@ -39,7 +40,7 @@ class PodcastScanner {
|
|||
* @param {import('./LibraryItemScanData')} libraryItemData
|
||||
* @param {import('../models/Library').LibrarySettingsObject} librarySettings
|
||||
* @param {import('./LibraryScan')} libraryScan
|
||||
* @returns {Promise<import('../models/LibraryItem')>}
|
||||
* @returns {Promise<{libraryItem:import('../models/LibraryItem'), wasUpdated:boolean}>}
|
||||
*/
|
||||
async rescanExistingPodcastLibraryItem(existingLibraryItem, libraryItemData, librarySettings, libraryScan) {
|
||||
/** @type {import('../models/Podcast')} */
|
||||
|
|
@ -87,7 +88,7 @@ class PodcastScanner {
|
|||
podcastEpisode.changed('audioFile', true)
|
||||
|
||||
// Set metadata and save episode
|
||||
this.setPodcastEpisodeMetadataFromAudioFile(podcastEpisode, libraryScan)
|
||||
AudioFileScanner.setPodcastEpisodeMetadataFromAudioMetaTags(podcastEpisode, libraryScan)
|
||||
libraryScan.addLog(LogLevel.INFO, `Podcast episode "${podcastEpisode.title}" keys changed [${podcastEpisode.changed()?.join(', ')}]`)
|
||||
await podcastEpisode.save()
|
||||
}
|
||||
|
|
@ -122,7 +123,7 @@ class PodcastScanner {
|
|||
}
|
||||
const newPodcastEpisode = Database.podcastEpisodeModel.build(newEpisode)
|
||||
// Set metadata and save new episode
|
||||
this.setPodcastEpisodeMetadataFromAudioFile(newPodcastEpisode, libraryScan)
|
||||
AudioFileScanner.setPodcastEpisodeMetadataFromAudioMetaTags(newPodcastEpisode, libraryScan)
|
||||
libraryScan.addLog(LogLevel.INFO, `New Podcast episode "${newPodcastEpisode.title}" added`)
|
||||
await newPodcastEpisode.save()
|
||||
existingPodcastEpisodes.push(newPodcastEpisode)
|
||||
|
|
@ -201,7 +202,10 @@ class PodcastScanner {
|
|||
await existingLibraryItem.save()
|
||||
}
|
||||
|
||||
return existingLibraryItem
|
||||
return {
|
||||
libraryItem: existingLibraryItem,
|
||||
wasUpdated: hasMediaChanges || libraryItemUpdated
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
@ -239,7 +243,7 @@ class PodcastScanner {
|
|||
}
|
||||
|
||||
// Set metadata and save new episode
|
||||
this.setPodcastEpisodeMetadataFromAudioFile(newEpisode, libraryScan)
|
||||
AudioFileScanner.setPodcastEpisodeMetadataFromAudioMetaTags(newEpisode, libraryScan)
|
||||
libraryScan.addLog(LogLevel.INFO, `New Podcast episode "${newEpisode.title}" found`)
|
||||
newPodcastEpisodes.push(newEpisode)
|
||||
}
|
||||
|
|
@ -317,7 +321,7 @@ class PodcastScanner {
|
|||
async getPodcastMetadataFromScanData(podcastEpisodes, libraryItemData, libraryScan, existingLibraryItemId = null) {
|
||||
const podcastMetadata = {
|
||||
title: libraryItemData.mediaMetadata.title,
|
||||
titleIgnorePrefix: getTitleIgnorePrefix(libraryItemData.mediaMetadata.title),
|
||||
titleIgnorePrefix: undefined,
|
||||
author: undefined,
|
||||
releaseDate: undefined,
|
||||
feedURL: undefined,
|
||||
|
|
@ -333,133 +337,19 @@ class PodcastScanner {
|
|||
genres: []
|
||||
}
|
||||
|
||||
// Use audio meta tags
|
||||
if (podcastEpisodes.length) {
|
||||
const audioFileMetaTags = podcastEpisodes[0].audioFile.metaTags
|
||||
const overrideExistingDetails = Database.serverSettings.scannerPreferAudioMetadata
|
||||
|
||||
const MetadataMapArray = [
|
||||
{
|
||||
tag: 'tagAlbum',
|
||||
altTag: 'tagSeries',
|
||||
key: 'title'
|
||||
},
|
||||
{
|
||||
tag: 'tagArtist',
|
||||
key: 'author'
|
||||
},
|
||||
{
|
||||
tag: 'tagGenre',
|
||||
key: 'genres'
|
||||
},
|
||||
{
|
||||
tag: 'tagLanguage',
|
||||
key: 'language'
|
||||
},
|
||||
{
|
||||
tag: 'tagItunesId',
|
||||
key: 'itunesId'
|
||||
},
|
||||
{
|
||||
tag: 'tagPodcastType',
|
||||
key: 'podcastType',
|
||||
}
|
||||
]
|
||||
|
||||
MetadataMapArray.forEach((mapping) => {
|
||||
let value = audioFileMetaTags[mapping.tag]
|
||||
let tagToUse = mapping.tag
|
||||
if (!value && mapping.altTag) {
|
||||
value = audioFileMetaTags[mapping.altTag]
|
||||
tagToUse = mapping.altTag
|
||||
}
|
||||
|
||||
if (value && typeof value === 'string') {
|
||||
value = value.trim() // Trim whitespace
|
||||
|
||||
if (mapping.key === 'genres' && (!podcastMetadata.genres.length || overrideExistingDetails)) {
|
||||
podcastMetadata.genres = this.parseGenresString(value)
|
||||
libraryScan.addLog(LogLevel.DEBUG, `Mapping metadata to key ${tagToUse} => ${mapping.key}: ${podcastMetadata.genres.join(', ')}`)
|
||||
} else if (!podcastMetadata[mapping.key] || overrideExistingDetails) {
|
||||
podcastMetadata[mapping.key] = value
|
||||
libraryScan.addLog(LogLevel.DEBUG, `Mapping metadata to key ${tagToUse} => ${mapping.key}: ${podcastMetadata[mapping.key]}`)
|
||||
}
|
||||
}
|
||||
})
|
||||
AudioFileScanner.setPodcastMetadataFromAudioMetaTags(podcastEpisodes[0].audioFile, podcastMetadata, libraryScan)
|
||||
}
|
||||
|
||||
// If metadata.json or metadata.abs use this for metadata
|
||||
const metadataLibraryFile = libraryItemData.metadataJsonLibraryFile || libraryItemData.metadataAbsLibraryFile
|
||||
let metadataText = metadataLibraryFile ? await readTextFile(metadataLibraryFile.metadata.path) : null
|
||||
let metadataFilePath = metadataLibraryFile?.metadata.path
|
||||
let metadataFileFormat = libraryItemData.metadataJsonLibraryFile ? 'json' : 'abs'
|
||||
|
||||
// When metadata file is not stored with library item then check in the /metadata/items folder for it
|
||||
if (!metadataText && existingLibraryItemId) {
|
||||
let metadataPath = Path.join(global.MetadataPath, 'items', existingLibraryItemId)
|
||||
|
||||
let altFormat = global.ServerSettings.metadataFileFormat === 'json' ? 'abs' : 'json'
|
||||
// First check the metadata format set in server settings, fallback to the alternate
|
||||
metadataFilePath = Path.join(metadataPath, `metadata.${global.ServerSettings.metadataFileFormat}`)
|
||||
metadataFileFormat = global.ServerSettings.metadataFileFormat
|
||||
if (await fsExtra.pathExists(metadataFilePath)) {
|
||||
metadataText = await readTextFile(metadataFilePath)
|
||||
} else if (await fsExtra.pathExists(Path.join(metadataPath, `metadata.${altFormat}`))) {
|
||||
metadataFilePath = Path.join(metadataPath, `metadata.${altFormat}`)
|
||||
metadataFileFormat = altFormat
|
||||
metadataText = await readTextFile(metadataFilePath)
|
||||
}
|
||||
}
|
||||
|
||||
if (metadataText) {
|
||||
libraryScan.addLog(LogLevel.INFO, `Found metadata file "${metadataFilePath}" - preferring`)
|
||||
let abMetadata = null
|
||||
if (metadataFileFormat === 'json') {
|
||||
abMetadata = abmetadataGenerator.parseJson(metadataText)
|
||||
} else {
|
||||
abMetadata = abmetadataGenerator.parse(metadataText, 'podcast')
|
||||
}
|
||||
|
||||
if (abMetadata) {
|
||||
if (abMetadata.tags?.length) {
|
||||
podcastMetadata.tags = abMetadata.tags
|
||||
}
|
||||
for (const key in abMetadata.metadata) {
|
||||
if (abMetadata.metadata[key] === undefined) continue
|
||||
|
||||
// TODO: New podcast model changed some keys, need to update the abmetadataGenerator
|
||||
let newModelKey = key
|
||||
if (key === 'feedUrl') newModelKey = 'feedURL'
|
||||
else if (key === 'imageUrl') newModelKey = 'imageURL'
|
||||
else if (key === 'itunesPageUrl') newModelKey = 'itunesPageURL'
|
||||
else if (key === 'type') newModelKey = 'podcastType'
|
||||
|
||||
podcastMetadata[newModelKey] = abMetadata.metadata[key]
|
||||
}
|
||||
}
|
||||
}
|
||||
// Use metadata.json file
|
||||
await AbsMetadataFileScanner.scanPodcastMetadataFile(libraryScan, libraryItemData, podcastMetadata, existingLibraryItemId)
|
||||
|
||||
podcastMetadata.titleIgnorePrefix = getTitleIgnorePrefix(podcastMetadata.title)
|
||||
|
||||
return podcastMetadata
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse a genre string into multiple genres
|
||||
* @example "Fantasy;Sci-Fi;History" => ["Fantasy", "Sci-Fi", "History"]
|
||||
* @param {string} genreTag
|
||||
* @returns {string[]}
|
||||
*/
|
||||
parseGenresString(genreTag) {
|
||||
if (!genreTag?.length) return []
|
||||
const separators = ['/', '//', ';']
|
||||
for (let i = 0; i < separators.length; i++) {
|
||||
if (genreTag.includes(separators[i])) {
|
||||
return genreTag.split(separators[i]).map(genre => genre.trim()).filter(g => !!g)
|
||||
}
|
||||
}
|
||||
return [genreTag]
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @param {import('../models/LibraryItem')} libraryItem
|
||||
|
|
@ -477,190 +367,59 @@ class PodcastScanner {
|
|||
await fsExtra.ensureDir(metadataPath)
|
||||
}
|
||||
|
||||
const metadataFileFormat = global.ServerSettings.metadataFileFormat
|
||||
const metadataFilePath = Path.join(metadataPath, `metadata.${metadataFileFormat}`)
|
||||
if (metadataFileFormat === 'json') {
|
||||
// Remove metadata.abs if it exists
|
||||
if (await fsExtra.pathExists(Path.join(metadataPath, `metadata.abs`))) {
|
||||
libraryScan.addLog(LogLevel.DEBUG, `Removing metadata.abs for item "${libraryItem.media.title}"`)
|
||||
await fsExtra.remove(Path.join(metadataPath, `metadata.abs`))
|
||||
libraryItem.libraryFiles = libraryItem.libraryFiles.filter(lf => lf.metadata.path !== filePathToPOSIX(Path.join(metadataPath, `metadata.abs`)))
|
||||
}
|
||||
const metadataFilePath = Path.join(metadataPath, `metadata.${global.ServerSettings.metadataFileFormat}`)
|
||||
|
||||
// TODO: Update to not use `metadata` so it fits the updated model
|
||||
const jsonObject = {
|
||||
tags: libraryItem.media.tags || [],
|
||||
metadata: {
|
||||
title: libraryItem.media.title,
|
||||
author: libraryItem.media.author,
|
||||
description: libraryItem.media.description,
|
||||
releaseDate: libraryItem.media.releaseDate,
|
||||
genres: libraryItem.media.genres || [],
|
||||
feedUrl: libraryItem.media.feedURL,
|
||||
imageUrl: libraryItem.media.imageURL,
|
||||
itunesPageUrl: libraryItem.media.itunesPageURL,
|
||||
itunesId: libraryItem.media.itunesId,
|
||||
itunesArtistId: libraryItem.media.itunesArtistId,
|
||||
asin: libraryItem.media.asin,
|
||||
language: libraryItem.media.language,
|
||||
explicit: !!libraryItem.media.explicit,
|
||||
type: libraryItem.media.podcastType
|
||||
}
|
||||
}
|
||||
return fsExtra.writeFile(metadataFilePath, JSON.stringify(jsonObject, null, 2)).then(async () => {
|
||||
// Add metadata.json to libraryFiles array if it is new
|
||||
let metadataLibraryFile = libraryItem.libraryFiles.find(lf => lf.metadata.path === filePathToPOSIX(metadataFilePath))
|
||||
if (storeMetadataWithItem) {
|
||||
if (!metadataLibraryFile) {
|
||||
const newLibraryFile = new LibraryFile()
|
||||
await newLibraryFile.setDataFromPath(metadataFilePath, `metadata.json`)
|
||||
metadataLibraryFile = newLibraryFile.toJSON()
|
||||
libraryItem.libraryFiles.push(metadataLibraryFile)
|
||||
} else {
|
||||
const fileTimestamps = await getFileTimestampsWithIno(metadataFilePath)
|
||||
if (fileTimestamps) {
|
||||
metadataLibraryFile.metadata.mtimeMs = fileTimestamps.mtimeMs
|
||||
metadataLibraryFile.metadata.ctimeMs = fileTimestamps.ctimeMs
|
||||
metadataLibraryFile.metadata.size = fileTimestamps.size
|
||||
metadataLibraryFile.ino = fileTimestamps.ino
|
||||
}
|
||||
}
|
||||
const libraryItemDirTimestamps = await getFileTimestampsWithIno(libraryItem.path)
|
||||
if (libraryItemDirTimestamps) {
|
||||
libraryItem.mtime = libraryItemDirTimestamps.mtimeMs
|
||||
libraryItem.ctime = libraryItemDirTimestamps.ctimeMs
|
||||
let size = 0
|
||||
libraryItem.libraryFiles.forEach((lf) => size += (!isNaN(lf.metadata.size) ? Number(lf.metadata.size) : 0))
|
||||
libraryItem.size = size
|
||||
}
|
||||
}
|
||||
|
||||
libraryScan.addLog(LogLevel.DEBUG, `Success saving abmetadata to "${metadataFilePath}"`)
|
||||
|
||||
return metadataLibraryFile
|
||||
}).catch((error) => {
|
||||
libraryScan.addLog(LogLevel.ERROR, `Failed to save json file at "${metadataFilePath}"`, error)
|
||||
return null
|
||||
})
|
||||
} else {
|
||||
// Remove metadata.json if it exists
|
||||
if (await fsExtra.pathExists(Path.join(metadataPath, `metadata.json`))) {
|
||||
libraryScan.addLog(LogLevel.DEBUG, `Removing metadata.json for item "${libraryItem.media.title}"`)
|
||||
await fsExtra.remove(Path.join(metadataPath, `metadata.json`))
|
||||
libraryItem.libraryFiles = libraryItem.libraryFiles.filter(lf => lf.metadata.path !== filePathToPOSIX(Path.join(metadataPath, `metadata.json`)))
|
||||
}
|
||||
|
||||
return abmetadataGenerator.generateFromNewModel(libraryItem, metadataFilePath).then(async (success) => {
|
||||
if (!success) {
|
||||
libraryScan.addLog(LogLevel.ERROR, `Failed saving abmetadata to "${metadataFilePath}"`)
|
||||
return null
|
||||
}
|
||||
// Add metadata.abs to libraryFiles array if it is new
|
||||
let metadataLibraryFile = libraryItem.libraryFiles.find(lf => lf.metadata.path === filePathToPOSIX(metadataFilePath))
|
||||
if (storeMetadataWithItem) {
|
||||
if (!metadataLibraryFile) {
|
||||
const newLibraryFile = new LibraryFile()
|
||||
await newLibraryFile.setDataFromPath(metadataFilePath, `metadata.abs`)
|
||||
metadataLibraryFile = newLibraryFile.toJSON()
|
||||
libraryItem.libraryFiles.push(metadataLibraryFile)
|
||||
} else {
|
||||
const fileTimestamps = await getFileTimestampsWithIno(metadataFilePath)
|
||||
if (fileTimestamps) {
|
||||
metadataLibraryFile.metadata.mtimeMs = fileTimestamps.mtimeMs
|
||||
metadataLibraryFile.metadata.ctimeMs = fileTimestamps.ctimeMs
|
||||
metadataLibraryFile.metadata.size = fileTimestamps.size
|
||||
metadataLibraryFile.ino = fileTimestamps.ino
|
||||
}
|
||||
}
|
||||
const libraryItemDirTimestamps = await getFileTimestampsWithIno(libraryItem.path)
|
||||
if (libraryItemDirTimestamps) {
|
||||
libraryItem.mtime = libraryItemDirTimestamps.mtimeMs
|
||||
libraryItem.ctime = libraryItemDirTimestamps.ctimeMs
|
||||
let size = 0
|
||||
libraryItem.libraryFiles.forEach((lf) => size += (!isNaN(lf.metadata.size) ? Number(lf.metadata.size) : 0))
|
||||
libraryItem.size = size
|
||||
}
|
||||
}
|
||||
|
||||
libraryScan.addLog(LogLevel.DEBUG, `Success saving abmetadata to "${metadataFilePath}"`)
|
||||
return metadataLibraryFile
|
||||
})
|
||||
const jsonObject = {
|
||||
tags: libraryItem.media.tags || [],
|
||||
title: libraryItem.media.title,
|
||||
author: libraryItem.media.author,
|
||||
description: libraryItem.media.description,
|
||||
releaseDate: libraryItem.media.releaseDate,
|
||||
genres: libraryItem.media.genres || [],
|
||||
feedURL: libraryItem.media.feedURL,
|
||||
imageURL: libraryItem.media.imageURL,
|
||||
itunesPageURL: libraryItem.media.itunesPageURL,
|
||||
itunesId: libraryItem.media.itunesId,
|
||||
itunesArtistId: libraryItem.media.itunesArtistId,
|
||||
asin: libraryItem.media.asin,
|
||||
language: libraryItem.media.language,
|
||||
explicit: !!libraryItem.media.explicit,
|
||||
podcastType: libraryItem.media.podcastType
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @param {PodcastEpisode} podcastEpisode Not the model when creating new podcast
|
||||
* @param {import('./ScanLogger')} scanLogger
|
||||
*/
|
||||
setPodcastEpisodeMetadataFromAudioFile(podcastEpisode, scanLogger) {
|
||||
const MetadataMapArray = [
|
||||
{
|
||||
tag: 'tagComment',
|
||||
altTag: 'tagSubtitle',
|
||||
key: 'description'
|
||||
},
|
||||
{
|
||||
tag: 'tagSubtitle',
|
||||
key: 'subtitle'
|
||||
},
|
||||
{
|
||||
tag: 'tagDate',
|
||||
key: 'pubDate'
|
||||
},
|
||||
{
|
||||
tag: 'tagDisc',
|
||||
key: 'season',
|
||||
},
|
||||
{
|
||||
tag: 'tagTrack',
|
||||
altTag: 'tagSeriesPart',
|
||||
key: 'episode'
|
||||
},
|
||||
{
|
||||
tag: 'tagTitle',
|
||||
key: 'title'
|
||||
},
|
||||
{
|
||||
tag: 'tagEpisodeType',
|
||||
key: 'episodeType'
|
||||
}
|
||||
]
|
||||
|
||||
const audioFileMetaTags = podcastEpisode.audioFile.metaTags
|
||||
const overrideExistingDetails = Database.serverSettings.scannerPreferAudioMetadata
|
||||
MetadataMapArray.forEach((mapping) => {
|
||||
let value = audioFileMetaTags[mapping.tag]
|
||||
let tagToUse = mapping.tag
|
||||
if (!value && mapping.altTag) {
|
||||
tagToUse = mapping.altTag
|
||||
value = audioFileMetaTags[mapping.altTag]
|
||||
}
|
||||
|
||||
if (value && typeof value === 'string') {
|
||||
value = value.trim() // Trim whitespace
|
||||
|
||||
if (mapping.key === 'pubDate' && (!podcastEpisode.pubDate || overrideExistingDetails)) {
|
||||
const pubJsDate = new Date(value)
|
||||
if (pubJsDate && !isNaN(pubJsDate)) {
|
||||
podcastEpisode.publishedAt = pubJsDate.valueOf()
|
||||
podcastEpisode.pubDate = value
|
||||
scanLogger.addLog(LogLevel.DEBUG, `Mapping metadata to key ${tagToUse} => ${mapping.key}: ${podcastEpisode[mapping.key]}`)
|
||||
} else {
|
||||
scanLogger.addLog(LogLevel.WARN, `Mapping pubDate with tag ${tagToUse} has invalid date "${value}"`)
|
||||
return fsExtra.writeFile(metadataFilePath, JSON.stringify(jsonObject, null, 2)).then(async () => {
|
||||
// Add metadata.json to libraryFiles array if it is new
|
||||
let metadataLibraryFile = libraryItem.libraryFiles.find(lf => lf.metadata.path === filePathToPOSIX(metadataFilePath))
|
||||
if (storeMetadataWithItem) {
|
||||
if (!metadataLibraryFile) {
|
||||
const newLibraryFile = new LibraryFile()
|
||||
await newLibraryFile.setDataFromPath(metadataFilePath, `metadata.json`)
|
||||
metadataLibraryFile = newLibraryFile.toJSON()
|
||||
libraryItem.libraryFiles.push(metadataLibraryFile)
|
||||
} else {
|
||||
const fileTimestamps = await getFileTimestampsWithIno(metadataFilePath)
|
||||
if (fileTimestamps) {
|
||||
metadataLibraryFile.metadata.mtimeMs = fileTimestamps.mtimeMs
|
||||
metadataLibraryFile.metadata.ctimeMs = fileTimestamps.ctimeMs
|
||||
metadataLibraryFile.metadata.size = fileTimestamps.size
|
||||
metadataLibraryFile.ino = fileTimestamps.ino
|
||||
}
|
||||
} else if (mapping.key === 'episodeType' && (!podcastEpisode.episodeType || overrideExistingDetails)) {
|
||||
if (['full', 'trailer', 'bonus'].includes(value)) {
|
||||
podcastEpisode.episodeType = value
|
||||
scanLogger.addLog(LogLevel.DEBUG, `Mapping metadata to key ${tagToUse} => ${mapping.key}: ${podcastEpisode[mapping.key]}`)
|
||||
} else {
|
||||
scanLogger.addLog(LogLevel.WARN, `Mapping episodeType with invalid value "${value}". Must be one of [full, trailer, bonus].`)
|
||||
}
|
||||
} else if (!podcastEpisode[mapping.key] || overrideExistingDetails) {
|
||||
podcastEpisode[mapping.key] = value
|
||||
scanLogger.addLog(LogLevel.DEBUG, `Mapping metadata to key ${tagToUse} => ${mapping.key}: ${podcastEpisode[mapping.key]}`)
|
||||
}
|
||||
const libraryItemDirTimestamps = await getFileTimestampsWithIno(libraryItem.path)
|
||||
if (libraryItemDirTimestamps) {
|
||||
libraryItem.mtime = libraryItemDirTimestamps.mtimeMs
|
||||
libraryItem.ctime = libraryItemDirTimestamps.ctimeMs
|
||||
let size = 0
|
||||
libraryItem.libraryFiles.forEach((lf) => size += (!isNaN(lf.metadata.size) ? Number(lf.metadata.size) : 0))
|
||||
libraryItem.size = size
|
||||
}
|
||||
}
|
||||
|
||||
libraryScan.addLog(LogLevel.DEBUG, `Success saving abmetadata to "${metadataFilePath}"`)
|
||||
|
||||
return metadataLibraryFile
|
||||
}).catch((error) => {
|
||||
libraryScan.addLog(LogLevel.ERROR, `Failed to save json file at "${metadataFilePath}"`, error)
|
||||
return null
|
||||
})
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -12,6 +12,7 @@ const Author = require('../objects/entities/Author')
|
|||
const Series = require('../objects/entities/Series')
|
||||
const LibraryScanner = require('./LibraryScanner')
|
||||
const CoverManager = require('../managers/CoverManager')
|
||||
const TaskManager = require('../managers/TaskManager')
|
||||
|
||||
class Scanner {
|
||||
constructor() { }
|
||||
|
|
@ -280,6 +281,14 @@ class Scanner {
|
|||
return false
|
||||
}
|
||||
|
||||
/**
|
||||
* Quick match library items
|
||||
*
|
||||
* @param {import('../objects/Library')} library
|
||||
* @param {import('../objects/LibraryItem')[]} libraryItems
|
||||
* @param {LibraryScan} libraryScan
|
||||
* @returns {Promise<boolean>} false if scan canceled
|
||||
*/
|
||||
async matchLibraryItemsChunk(library, libraryItems, libraryScan) {
|
||||
for (let i = 0; i < libraryItems.length; i++) {
|
||||
const libraryItem = libraryItems[i]
|
||||
|
|
@ -313,6 +322,11 @@ class Scanner {
|
|||
return true
|
||||
}
|
||||
|
||||
/**
|
||||
* Quick match all library items for library
|
||||
*
|
||||
* @param {import('../objects/Library')} library
|
||||
*/
|
||||
async matchLibraryItems(library) {
|
||||
if (library.mediaType === 'podcast') {
|
||||
Logger.error(`[Scanner] matchLibraryItems: Match all not supported for podcasts yet`)
|
||||
|
|
@ -330,11 +344,14 @@ class Scanner {
|
|||
const libraryScan = new LibraryScan()
|
||||
libraryScan.setData(library, 'match')
|
||||
LibraryScanner.librariesScanning.push(libraryScan.getScanEmitData)
|
||||
SocketAuthority.emitter('scan_start', libraryScan.getScanEmitData)
|
||||
|
||||
const taskData = {
|
||||
libraryId: library.id
|
||||
}
|
||||
const task = TaskManager.createAndAddTask('library-match-all', `Matching books in "${library.name}"`, null, true, taskData)
|
||||
Logger.info(`[Scanner] matchLibraryItems: Starting library match scan ${libraryScan.id} for ${libraryScan.libraryName}`)
|
||||
|
||||
let hasMoreChunks = true
|
||||
let isCanceled = false
|
||||
while (hasMoreChunks) {
|
||||
const libraryItems = await Database.libraryItemModel.getLibraryItemsIncrement(offset, limit, { libraryId: library.id })
|
||||
if (!libraryItems.length) {
|
||||
|
|
@ -347,6 +364,7 @@ class Scanner {
|
|||
|
||||
const shouldContinue = await this.matchLibraryItemsChunk(library, oldLibraryItems, libraryScan)
|
||||
if (!shouldContinue) {
|
||||
isCanceled = true
|
||||
break
|
||||
}
|
||||
}
|
||||
|
|
@ -354,13 +372,15 @@ class Scanner {
|
|||
if (offset === 0) {
|
||||
Logger.error(`[Scanner] matchLibraryItems: Library has no items ${library.id}`)
|
||||
libraryScan.setComplete('Library has no items')
|
||||
task.setFailed(libraryScan.error)
|
||||
} else {
|
||||
libraryScan.setComplete()
|
||||
task.setFinished(isCanceled ? 'Canceled' : libraryScan.scanResultsString)
|
||||
}
|
||||
|
||||
delete LibraryScanner.cancelLibraryScan[libraryScan.libraryId]
|
||||
LibraryScanner.librariesScanning = LibraryScanner.librariesScanning.filter(ls => ls.id !== library.id)
|
||||
SocketAuthority.emitter('scan_complete', libraryScan.getScanEmitData)
|
||||
TaskManager.taskFinished(task)
|
||||
}
|
||||
}
|
||||
module.exports = new Scanner()
|
||||
|
|
|
|||
|
|
@ -1,7 +1,8 @@
|
|||
const fs = require('../libs/fsExtra')
|
||||
const rra = require('../libs/recursiveReaddirAsync')
|
||||
const axios = require('axios')
|
||||
const Path = require('path')
|
||||
const ssrfFilter = require('ssrf-req-filter')
|
||||
const fs = require('../libs/fsExtra')
|
||||
const rra = require('../libs/recursiveReaddirAsync')
|
||||
const Logger = require('../Logger')
|
||||
const { AudioMimeType } = require('./constants')
|
||||
|
||||
|
|
@ -18,22 +19,33 @@ const filePathToPOSIX = (path) => {
|
|||
}
|
||||
module.exports.filePathToPOSIX = filePathToPOSIX
|
||||
|
||||
async function getFileStat(path) {
|
||||
/**
|
||||
* Check path is a child of or equal to another path
|
||||
*
|
||||
* @param {string} parentPath
|
||||
* @param {string} childPath
|
||||
* @returns {boolean}
|
||||
*/
|
||||
/**
 * Check path is a child of or equal to another path
 *
 * @param {string} parentPath
 * @param {string} childPath
 * @returns {boolean}
 */
function isSameOrSubPath(parentPath, childPath) {
  const normalizedParent = filePathToPOSIX(parentPath)
  const normalizedChild = filePathToPOSIX(childPath)
  if (normalizedParent === normalizedChild) return true

  const relativePath = Path.relative(normalizedParent, normalizedChild)
  // Empty relative path means the paths only differed by a trailing
  // separator (e.g. parentPath = '/a/b/', childPath = '/a/b')
  if (relativePath === '') return true
  // A sub path never climbs upward and is never absolute
  return !relativePath.startsWith('..') && !Path.isAbsolute(relativePath)
}
|
||||
module.exports.isSameOrSubPath = isSameOrSubPath
|
||||
|
||||
function getFileStat(path) {
|
||||
try {
|
||||
var stat = await fs.stat(path)
|
||||
return {
|
||||
size: stat.size,
|
||||
atime: stat.atime,
|
||||
mtime: stat.mtime,
|
||||
ctime: stat.ctime,
|
||||
birthtime: stat.birthtime
|
||||
}
|
||||
return fs.stat(path)
|
||||
} catch (err) {
|
||||
Logger.error('[fileUtils] Failed to stat', err)
|
||||
return false
|
||||
return null
|
||||
}
|
||||
}
|
||||
module.exports.getFileStat = getFileStat
|
||||
|
||||
async function getFileTimestampsWithIno(path) {
|
||||
try {
|
||||
|
|
@ -52,12 +64,25 @@ async function getFileTimestampsWithIno(path) {
|
|||
}
|
||||
module.exports.getFileTimestampsWithIno = getFileTimestampsWithIno
|
||||
|
||||
async function getFileSize(path) {
|
||||
var stat = await getFileStat(path)
|
||||
if (!stat) return 0
|
||||
return stat.size || 0
|
||||
/**
|
||||
* Get file size
|
||||
*
|
||||
* @param {string} path
|
||||
* @returns {Promise<number>}
|
||||
*/
|
||||
module.exports.getFileSize = async (path) => {
|
||||
return (await getFileStat(path))?.size || 0
|
||||
}
|
||||
|
||||
/**
|
||||
* Get file mtimeMs
|
||||
*
|
||||
* @param {string} path
|
||||
* @returns {Promise<number>} epoch timestamp
|
||||
*/
|
||||
module.exports.getFileMTimeMs = async (path) => {
|
||||
return (await getFileStat(path))?.mtimeMs || 0
|
||||
}
|
||||
module.exports.getFileSize = getFileSize
|
||||
|
||||
/**
|
||||
*
|
||||
|
|
@ -203,15 +228,32 @@ async function recurseFiles(path, relPathToReplace = null) {
|
|||
}
|
||||
module.exports.recurseFiles = recurseFiles
|
||||
|
||||
module.exports.downloadFile = (url, filepath) => {
|
||||
/**
|
||||
* Download file from web to local file system
|
||||
* Uses SSRF filter to prevent internal URLs
|
||||
*
|
||||
* @param {string} url
|
||||
* @param {string} filepath path to download the file to
|
||||
* @param {Function} [contentTypeFilter] validate content type before writing
|
||||
* @returns {Promise}
|
||||
*/
|
||||
module.exports.downloadFile = (url, filepath, contentTypeFilter = null) => {
|
||||
return new Promise(async (resolve, reject) => {
|
||||
Logger.debug(`[fileUtils] Downloading file to ${filepath}`)
|
||||
axios({
|
||||
url,
|
||||
method: 'GET',
|
||||
responseType: 'stream',
|
||||
timeout: 30000
|
||||
timeout: 30000,
|
||||
httpAgent: ssrfFilter(url),
|
||||
httpsAgent: ssrfFilter(url)
|
||||
}).then((response) => {
|
||||
// Validate content type
|
||||
if (contentTypeFilter && !contentTypeFilter?.(response.headers?.['content-type'])) {
|
||||
return reject(new Error(`Invalid content type "${response.headers?.['content-type'] || ''}"`))
|
||||
}
|
||||
|
||||
// Write to filepath
|
||||
const writer = fs.createWriteStream(filepath)
|
||||
response.data.pipe(writer)
|
||||
|
||||
|
|
@ -224,6 +266,21 @@ module.exports.downloadFile = (url, filepath) => {
|
|||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Download image file from web to local file system
|
||||
* Response header must have content-type of image/ (excluding svg)
|
||||
*
|
||||
* @param {string} url
|
||||
* @param {string} filepath
|
||||
* @returns {Promise}
|
||||
*/
|
||||
module.exports.downloadImageFile = (url, filepath) => {
  // Accept only image content types, excluding SVG
  const isAllowedImageType = (contentType) => {
    return contentType?.startsWith('image/') && contentType !== 'image/svg+xml'
  }
  return this.downloadFile(url, filepath, isAllowedImageType)
}
|
||||
|
||||
module.exports.sanitizeFilename = (filename, colonReplacement = ' - ') => {
|
||||
if (typeof filename !== 'string') {
|
||||
return false
|
||||
|
|
|
|||
|
|
@ -1,461 +1,26 @@
|
|||
const fs = require('../../libs/fsExtra')
|
||||
const package = require('../../../package.json')
|
||||
const Logger = require('../../Logger')
|
||||
const { getId } = require('../index')
|
||||
const areEquivalent = require('../areEquivalent')
|
||||
|
||||
|
||||
const CurrentAbMetadataVersion = 2
|
||||
// abmetadata v1 key map
|
||||
// const bookKeyMap = {
|
||||
// title: 'title',
|
||||
// subtitle: 'subtitle',
|
||||
// author: 'authorFL',
|
||||
// narrator: 'narratorFL',
|
||||
// publishedYear: 'publishedYear',
|
||||
// publisher: 'publisher',
|
||||
// description: 'description',
|
||||
// isbn: 'isbn',
|
||||
// asin: 'asin',
|
||||
// language: 'language',
|
||||
// genres: 'genresCommaSeparated'
|
||||
// }
|
||||
|
||||
// Split a comma separated string into a deduplicated array of trimmed,
// non-empty values. Returns [] for a falsy input.
const commaSeparatedToArray = (v) => {
  if (!v) return []
  const unique = new Set()
  for (const part of v.split(',')) {
    const trimmed = part.trim()
    if (trimmed) unique.add(trimmed)
  }
  return Array.from(unique)
}
|
||||
|
||||
// Maps each podcast metadata field to/from its abmetadata text representation.
// `to` serializes a value from the metadata object into the string written to
// the abs file; `from` parses the string read back from the file.
const podcastMetadataMapper = {
  title: {
    to: (m) => m.title || '',
    from: (v) => v || ''
  },
  author: {
    to: (m) => m.author || '',
    from: (v) => v || null
  },
  language: {
    to: (m) => m.language || '',
    from: (v) => v || null
  },
  genres: {
    // Genres are stored as a comma separated list
    to: (m) => m.genres?.join(', ') || '',
    from: (v) => commaSeparatedToArray(v)
  },
  feedUrl: {
    to: (m) => m.feedUrl || '',
    from: (v) => v || null
  },
  itunesId: {
    to: (m) => m.itunesId || '',
    from: (v) => v || null
  },
  explicit: {
    // Stored as a 'Y'/'N' flag in the abs file
    to: (m) => m.explicit ? 'Y' : 'N',
    from: (v) => v && v.toLowerCase() == 'y'
  }
}
|
||||
|
||||
// Maps each book metadata field to/from its abmetadata text representation.
// `to` serializes a value from the metadata object into the string written to
// the abs file; `from` parses the string read back from the file.
const bookMetadataMapper = {
  title: {
    to: (m) => m.title || '',
    from: (v) => v || ''
  },
  subtitle: {
    to: (m) => m.subtitle || '',
    from: (v) => v || null
  },
  authors: {
    to: (m) => {
      // Prefer the pre-computed display name when present
      if (m.authorName !== undefined) return m.authorName
      if (!m.authors?.length) return ''
      return m.authors.map(au => au.name).join(', ')
    },
    from: (v) => commaSeparatedToArray(v)
  },
  narrators: {
    to: (m) => m.narrators?.join(', ') || '',
    from: (v) => commaSeparatedToArray(v)
  },
  publishedYear: {
    to: (m) => m.publishedYear || '',
    from: (v) => v || null
  },
  publisher: {
    to: (m) => m.publisher || '',
    from: (v) => v || null
  },
  isbn: {
    to: (m) => m.isbn || '',
    from: (v) => v || null
  },
  asin: {
    to: (m) => m.asin || '',
    from: (v) => v || null
  },
  language: {
    to: (m) => m.language || '',
    from: (v) => v || null
  },
  genres: {
    // Genres are stored as a comma separated list
    to: (m) => m.genres?.join(', ') || '',
    from: (v) => commaSeparatedToArray(v)
  },
  series: {
    to: (m) => {
      // Prefer the pre-computed display name when present
      if (m.seriesName !== undefined) return m.seriesName
      if (!m.series?.length) return ''
      // Each series is serialized as "Name #sequence", comma separated
      return m.series.map((se) => {
        const sequence = se.bookSeries?.sequence || ''
        if (!sequence) return se.name
        return `${se.name} #${sequence}`
      }).join(', ')
    },
    from: (v) => {
      return commaSeparatedToArray(v).map(series => { // Return array of { name, sequence }
        let sequence = null
        let name = series
        // Series sequence match any characters after " #" other than whitespace and another #
        // e.g. "Name #1a" is valid. "Name #1#a" or "Name #1 a" is not valid.
        const matchResults = series.match(/ #([^#\s]+)$/) // Pull out sequence #
        if (matchResults && matchResults.length && matchResults.length > 1) {
          sequence = matchResults[1] // Group 1
          name = series.replace(matchResults[0], '')
        }
        return {
          name,
          sequence
        }
      })
    }
  },
  explicit: {
    // Stored as a 'Y'/'N' flag in the abs file
    to: (m) => m.explicit ? 'Y' : 'N',
    from: (v) => v && v.toLowerCase() == 'y'
  },
  abridged: {
    to: (m) => m.abridged ? 'Y' : 'N',
    from: (v) => v && v.toLowerCase() == 'y'
  }
}
|
||||
|
||||
// Selects the field mapper by the library item's media type
const metadataMappers = {
  book: bookMetadataMapper,
  podcast: podcastMetadataMapper
}
|
||||
|
||||
function generate(libraryItem, outputPath) {
|
||||
let fileString = `;ABMETADATA${CurrentAbMetadataVersion}\n`
|
||||
fileString += `#audiobookshelf v${package.version}\n\n`
|
||||
|
||||
const mediaType = libraryItem.mediaType
|
||||
|
||||
fileString += `media=${mediaType}\n`
|
||||
fileString += `tags=${JSON.stringify(libraryItem.media.tags)}\n`
|
||||
|
||||
const metadataMapper = metadataMappers[mediaType]
|
||||
var mediaMetadata = libraryItem.media.metadata
|
||||
for (const key in metadataMapper) {
|
||||
fileString += `${key}=${metadataMapper[key].to(mediaMetadata)}\n`
|
||||
}
|
||||
|
||||
// Description block
|
||||
if (mediaMetadata.description) {
|
||||
fileString += '\n[DESCRIPTION]\n'
|
||||
fileString += mediaMetadata.description + '\n'
|
||||
}
|
||||
|
||||
// Book chapters
|
||||
if (libraryItem.mediaType == 'book' && libraryItem.media.chapters.length) {
|
||||
fileString += '\n'
|
||||
libraryItem.media.chapters.forEach((chapter) => {
|
||||
fileString += `[CHAPTER]\n`
|
||||
fileString += `start=${chapter.start}\n`
|
||||
fileString += `end=${chapter.end}\n`
|
||||
fileString += `title=${chapter.title}\n`
|
||||
})
|
||||
}
|
||||
return fs.writeFile(outputPath, fileString).then(() => true).catch((error) => {
|
||||
Logger.error(`[absMetaFileGenerator] Failed to save abs file`, error)
|
||||
return false
|
||||
})
|
||||
}
|
||||
module.exports.generate = generate
|
||||
|
||||
function generateFromNewModel(libraryItem, outputPath) {
|
||||
let fileString = `;ABMETADATA${CurrentAbMetadataVersion}\n`
|
||||
fileString += `#audiobookshelf v${package.version}\n\n`
|
||||
|
||||
const mediaType = libraryItem.mediaType
|
||||
|
||||
fileString += `media=${mediaType}\n`
|
||||
fileString += `tags=${JSON.stringify(libraryItem.media.tags || '')}\n`
|
||||
|
||||
const metadataMapper = metadataMappers[mediaType]
|
||||
for (const key in metadataMapper) {
|
||||
fileString += `${key}=${metadataMapper[key].to(libraryItem.media)}\n`
|
||||
}
|
||||
|
||||
// Description block
|
||||
if (libraryItem.media.description) {
|
||||
fileString += '\n[DESCRIPTION]\n'
|
||||
fileString += libraryItem.media.description + '\n'
|
||||
}
|
||||
|
||||
// Book chapters
|
||||
if (mediaType == 'book' && libraryItem.media.chapters?.length) {
|
||||
fileString += '\n'
|
||||
libraryItem.media.chapters.forEach((chapter) => {
|
||||
fileString += `[CHAPTER]\n`
|
||||
fileString += `start=${chapter.start}\n`
|
||||
fileString += `end=${chapter.end}\n`
|
||||
fileString += `title=${chapter.title}\n`
|
||||
})
|
||||
}
|
||||
return fs.writeFile(outputPath, fileString).then(() => true).catch((error) => {
|
||||
Logger.error(`[absMetaFileGenerator] Failed to save abs file`, error)
|
||||
return false
|
||||
})
|
||||
}
|
||||
module.exports.generateFromNewModel = generateFromNewModel
|
||||
|
||||
/**
 * Group abmetadata lines into sections. A section starts at a line beginning
 * with '[' and contains every following non-blank line up to the next section.
 *
 * @param {string[]} lines
 * @returns {string[][]} array of sections, each an array of lines
 */
function parseSections(lines) {
  // First line must open a section, otherwise there is nothing to parse
  if (!lines?.length || !lines[0].startsWith('[')) {
    return []
  }

  const sections = []
  let currentSection = []
  for (const line of lines) {
    if (!line || !line.trim()) continue // skip blank lines

    if (line.startsWith('[') && currentSection.length) {
      // A new section header closes the one being collected
      sections.push(currentSection)
      currentSection = []
    }
    currentSection.push(line)
  }
  if (currentSection.length) sections.push(currentSection)
  return sections
}
|
||||
|
||||
// lines inside chapter section
|
||||
/**
 * Parse the lines inside a [CHAPTER] section into a chapter object.
 *
 * @param {string[]} lines - "key=value" lines (keys: start, end, title)
 * @returns {{start: number, end: number, title: string|null}|null} null when
 *          start/end are missing or end precedes start
 */
function parseChapterLines(lines) {
  const chapter = {
    start: null,
    end: null,
    title: null
  }

  lines.forEach((line) => {
    const keyValue = line.split('=')
    if (keyValue.length > 1) {
      const key = keyValue[0].trim()
      // Re-join the remainder so values containing "=" (e.g. chapter titles)
      // are not truncated — consistent with the tags parsing in this file
      const value = keyValue.slice(1).join('=').trim()

      if (key === 'start' || key === 'end') {
        if (!isNaN(value)) {
          chapter[key] = Number(value)
        } else {
          Logger.warn(`[abmetadataGenerator] Invalid chapter value for ${key}: ${value}`)
        }
      } else if (key === 'title') {
        chapter[key] = value
      }
    }
  })

  // start and end are required and must be a valid ordered range
  if (chapter.start === null || chapter.end === null || chapter.end < chapter.start) {
    Logger.warn(`[abmetadataGenerator] Invalid chapter`)
    return null
  }
  return chapter
}
|
||||
|
||||
/**
 * Parse the JSON array stored on the "tags=" line.
 *
 * @param {string} value - JSON array string
 * @returns {string[]|null} tags (empty entries removed), or null when the
 *          input is falsy or cannot be parsed
 */
function parseTags(value) {
  if (!value) return null
  try {
    // Keep only tags that are non-empty after trimming
    return JSON.parse(value).filter((loadedTag) => loadedTag.trim())
  } catch (err) {
    Logger.error(`[abmetadataGenerator] Error parsing TAGS "${value}":`, err.message)
    return null
  }
}
|
||||
|
||||
function parseAbMetadataText(text, mediaType) {
|
||||
if (!text) return null
|
||||
let lines = text.split(/\r?\n/)
|
||||
|
||||
// Check first line and get abmetadata version number
|
||||
const firstLine = lines.shift().toLowerCase()
|
||||
if (!firstLine.startsWith(';abmetadata')) {
|
||||
Logger.error(`Invalid abmetadata file first line is not ;abmetadata "${firstLine}"`)
|
||||
return null
|
||||
}
|
||||
const abmetadataVersion = Number(firstLine.replace(';abmetadata', '').trim())
|
||||
if (isNaN(abmetadataVersion) || abmetadataVersion != CurrentAbMetadataVersion) {
|
||||
Logger.warn(`Invalid abmetadata version ${abmetadataVersion} - must use version ${CurrentAbMetadataVersion}`)
|
||||
return null
|
||||
}
|
||||
|
||||
// Remove comments and empty lines
|
||||
const ignoreFirstChars = [' ', '#', ';'] // Ignore any line starting with the following
|
||||
lines = lines.filter(line => !!line.trim() && !ignoreFirstChars.includes(line[0]))
|
||||
|
||||
// Get lines that map to book details (all lines before the first chapter or description section)
|
||||
const firstSectionLine = lines.findIndex(l => l.startsWith('['))
|
||||
const detailLines = firstSectionLine > 0 ? lines.slice(0, firstSectionLine) : lines
|
||||
const remainingLines = firstSectionLine > 0 ? lines.slice(firstSectionLine) : []
|
||||
|
||||
if (!detailLines.length) {
|
||||
Logger.error(`Invalid abmetadata file no detail lines`)
|
||||
return null
|
||||
}
|
||||
|
||||
// Check the media type saved for this abmetadata file show warning if not matching expected
|
||||
if (detailLines[0].toLowerCase().startsWith('media=')) {
|
||||
const mediaLine = detailLines.shift() // Remove media line
|
||||
const abMediaType = mediaLine.toLowerCase().split('=')[1].trim()
|
||||
if (abMediaType != mediaType) {
|
||||
Logger.warn(`Invalid media type in abmetadata file ${abMediaType} expecting ${mediaType}`)
|
||||
}
|
||||
} else {
|
||||
Logger.warn(`No media type found in abmetadata file - expecting ${mediaType}`)
|
||||
}
|
||||
|
||||
const metadataMapper = metadataMappers[mediaType]
|
||||
// Put valid book detail values into map
|
||||
const mediaDetails = {
|
||||
metadata: {},
|
||||
chapters: [],
|
||||
tags: null // When tags are null it will not be used
|
||||
}
|
||||
|
||||
for (let i = 0; i < detailLines.length; i++) {
|
||||
const line = detailLines[i]
|
||||
const keyValue = line.split('=')
|
||||
if (keyValue.length < 2) {
|
||||
Logger.warn('abmetadata invalid line has no =', line)
|
||||
} else if (keyValue[0].trim() === 'tags') { // Parse tags
|
||||
const value = keyValue.slice(1).join('=').trim() // Everything after "tags="
|
||||
mediaDetails.tags = parseTags(value)
|
||||
} else if (!metadataMapper[keyValue[0].trim()]) { // Ensure valid media metadata key
|
||||
Logger.warn(`abmetadata key "${keyValue[0].trim()}" is not a valid ${mediaType} metadata key`)
|
||||
} else {
|
||||
const key = keyValue.shift().trim()
|
||||
const value = keyValue.join('=').trim()
|
||||
mediaDetails.metadata[key] = metadataMapper[key].from(value)
|
||||
}
|
||||
}
|
||||
|
||||
// Parse sections for description and chapters
|
||||
const sections = parseSections(remainingLines)
|
||||
sections.forEach((section) => {
|
||||
const sectionHeader = section.shift()
|
||||
if (sectionHeader.toLowerCase().startsWith('[description]')) {
|
||||
mediaDetails.metadata.description = section.join('\n')
|
||||
} else if (sectionHeader.toLowerCase().startsWith('[chapter]')) {
|
||||
const chapter = parseChapterLines(section)
|
||||
if (chapter) {
|
||||
mediaDetails.chapters.push(chapter)
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
mediaDetails.chapters.sort((a, b) => a.start - b.start)
|
||||
|
||||
if (mediaDetails.chapters.length) {
|
||||
mediaDetails.chapters = cleanChaptersArray(mediaDetails.chapters, mediaDetails.metadata.title) || []
|
||||
}
|
||||
|
||||
return mediaDetails
|
||||
}
|
||||
module.exports.parse = parseAbMetadataText
|
||||
|
||||
function checkUpdatedBookAuthors(abmetadataAuthors, authors) {
|
||||
const finalAuthors = []
|
||||
let hasUpdates = false
|
||||
|
||||
abmetadataAuthors.forEach((authorName) => {
|
||||
const findAuthor = authors.find(au => au.name.toLowerCase() == authorName.toLowerCase())
|
||||
if (!findAuthor) {
|
||||
hasUpdates = true
|
||||
finalAuthors.push({
|
||||
id: getId('new'), // New author gets created in Scanner.js after library scan
|
||||
name: authorName
|
||||
})
|
||||
} else {
|
||||
finalAuthors.push(findAuthor)
|
||||
}
|
||||
})
|
||||
|
||||
var authorsRemoved = authors.filter(au => !abmetadataAuthors.some(auname => auname.toLowerCase() == au.name.toLowerCase()))
|
||||
if (authorsRemoved.length) {
|
||||
hasUpdates = true
|
||||
}
|
||||
|
||||
return {
|
||||
authors: finalAuthors,
|
||||
hasUpdates
|
||||
}
|
||||
}
|
||||
|
||||
function checkUpdatedBookSeries(abmetadataSeries, series) {
|
||||
var finalSeries = []
|
||||
var hasUpdates = false
|
||||
|
||||
abmetadataSeries.forEach((seriesObj) => {
|
||||
var findSeries = series.find(se => se.name.toLowerCase() == seriesObj.name.toLowerCase())
|
||||
if (!findSeries) {
|
||||
hasUpdates = true
|
||||
finalSeries.push({
|
||||
id: getId('new'), // New series gets created in Scanner.js after library scan
|
||||
name: seriesObj.name,
|
||||
sequence: seriesObj.sequence
|
||||
})
|
||||
} else if (findSeries.sequence != seriesObj.sequence) { // Sequence was updated
|
||||
hasUpdates = true
|
||||
finalSeries.push({
|
||||
id: findSeries.id,
|
||||
name: findSeries.name,
|
||||
sequence: seriesObj.sequence
|
||||
})
|
||||
} else {
|
||||
finalSeries.push(findSeries)
|
||||
}
|
||||
})
|
||||
|
||||
var seriesRemoved = series.filter(se => !abmetadataSeries.some(_se => _se.name.toLowerCase() == se.name.toLowerCase()))
|
||||
if (seriesRemoved.length) {
|
||||
hasUpdates = true
|
||||
}
|
||||
|
||||
return {
|
||||
series: finalSeries,
|
||||
hasUpdates
|
||||
}
|
||||
}
|
||||
|
||||
function checkArraysChanged(abmetadataArray, mediaArray) {
|
||||
if (!Array.isArray(abmetadataArray)) return false
|
||||
if (!Array.isArray(mediaArray)) return true
|
||||
return abmetadataArray.join(',') != mediaArray.join(',')
|
||||
}
|
||||
|
||||
function parseJsonMetadataText(text) {
|
||||
try {
|
||||
const abmetadataData = JSON.parse(text)
|
||||
if (!abmetadataData.metadata) abmetadataData.metadata = {}
|
||||
|
||||
if (abmetadataData.metadata.series?.length) {
|
||||
abmetadataData.metadata.series = [...new Set(abmetadataData.metadata.series.map(t => t?.trim()).filter(t => t))]
|
||||
abmetadataData.metadata.series = abmetadataData.metadata.series.map(series => {
|
||||
// Old metadata.json used nested "metadata"
|
||||
if (abmetadataData.metadata) {
|
||||
for (const key in abmetadataData.metadata) {
|
||||
if (abmetadataData.metadata[key] === undefined) continue
|
||||
let newModelKey = key
|
||||
if (key === 'feedUrl') newModelKey = 'feedURL'
|
||||
else if (key === 'imageUrl') newModelKey = 'imageURL'
|
||||
else if (key === 'itunesPageUrl') newModelKey = 'itunesPageURL'
|
||||
else if (key === 'type') newModelKey = 'podcastType'
|
||||
abmetadataData[newModelKey] = abmetadataData.metadata[key]
|
||||
}
|
||||
}
|
||||
delete abmetadataData.metadata
|
||||
|
||||
if (abmetadataData.series?.length) {
|
||||
abmetadataData.series = [...new Set(abmetadataData.series.map(t => t?.trim()).filter(t => t))]
|
||||
abmetadataData.series = abmetadataData.series.map(series => {
|
||||
let sequence = null
|
||||
let name = series
|
||||
// Series sequence match any characters after " #" other than whitespace and another #
|
||||
|
|
@ -476,17 +41,17 @@ function parseJsonMetadataText(text) {
|
|||
abmetadataData.tags = [...new Set(abmetadataData.tags.map(t => t?.trim()).filter(t => t))]
|
||||
}
|
||||
if (abmetadataData.chapters?.length) {
|
||||
abmetadataData.chapters = cleanChaptersArray(abmetadataData.chapters, abmetadataData.metadata.title)
|
||||
abmetadataData.chapters = cleanChaptersArray(abmetadataData.chapters, abmetadataData.title)
|
||||
}
|
||||
// clean remove dupes
|
||||
if (abmetadataData.metadata.authors?.length) {
|
||||
abmetadataData.metadata.authors = [...new Set(abmetadataData.metadata.authors.map(t => t?.trim()).filter(t => t))]
|
||||
if (abmetadataData.authors?.length) {
|
||||
abmetadataData.authors = [...new Set(abmetadataData.authors.map(t => t?.trim()).filter(t => t))]
|
||||
}
|
||||
if (abmetadataData.metadata.narrators?.length) {
|
||||
abmetadataData.metadata.narrators = [...new Set(abmetadataData.metadata.narrators.map(t => t?.trim()).filter(t => t))]
|
||||
if (abmetadataData.narrators?.length) {
|
||||
abmetadataData.narrators = [...new Set(abmetadataData.narrators.map(t => t?.trim()).filter(t => t))]
|
||||
}
|
||||
if (abmetadataData.metadata.genres?.length) {
|
||||
abmetadataData.metadata.genres = [...new Set(abmetadataData.metadata.genres.map(t => t?.trim()).filter(t => t))]
|
||||
if (abmetadataData.genres?.length) {
|
||||
abmetadataData.genres = [...new Set(abmetadataData.genres.map(t => t?.trim()).filter(t => t))]
|
||||
}
|
||||
return abmetadataData
|
||||
} catch (error) {
|
||||
|
|
@ -522,73 +87,3 @@ function cleanChaptersArray(chaptersArray, mediaTitle) {
|
|||
}
|
||||
return chapters
|
||||
}
|
||||
|
||||
// Input text from abmetadata file and return object of media changes
|
||||
// only returns object of changes. empty object means no changes
|
||||
function parseAndCheckForUpdates(text, media, mediaType, isJSON) {
|
||||
if (!text || !media || !media.metadata || !mediaType) {
|
||||
Logger.error(`Invalid inputs to parseAndCheckForUpdates`)
|
||||
return null
|
||||
}
|
||||
|
||||
const mediaMetadata = media.metadata
|
||||
const metadataUpdatePayload = {} // Only updated key/values
|
||||
|
||||
let abmetadataData = null
|
||||
|
||||
if (isJSON) {
|
||||
abmetadataData = parseJsonMetadataText(text)
|
||||
} else {
|
||||
abmetadataData = parseAbMetadataText(text, mediaType)
|
||||
}
|
||||
|
||||
if (!abmetadataData || !abmetadataData.metadata) {
|
||||
Logger.error(`[abmetadataGenerator] Invalid metadata file`)
|
||||
return null
|
||||
}
|
||||
|
||||
const abMetadata = abmetadataData.metadata // Metadata from abmetadata file
|
||||
for (const key in abMetadata) {
|
||||
if (mediaMetadata[key] !== undefined) {
|
||||
if (key === 'authors') {
|
||||
const authorUpdatePayload = checkUpdatedBookAuthors(abMetadata[key], mediaMetadata[key])
|
||||
if (authorUpdatePayload.hasUpdates) metadataUpdatePayload.authors = authorUpdatePayload.authors
|
||||
} else if (key === 'series') {
|
||||
const seriesUpdatePayload = checkUpdatedBookSeries(abMetadata[key], mediaMetadata[key])
|
||||
if (seriesUpdatePayload.hasUpdates) metadataUpdatePayload.series = seriesUpdatePayload.series
|
||||
} else if (key === 'genres' || key === 'narrators') { // Compare array differences
|
||||
if (checkArraysChanged(abMetadata[key], mediaMetadata[key])) {
|
||||
metadataUpdatePayload[key] = abMetadata[key]
|
||||
}
|
||||
} else if (abMetadata[key] !== mediaMetadata[key]) {
|
||||
metadataUpdatePayload[key] = abMetadata[key]
|
||||
}
|
||||
} else {
|
||||
Logger.warn('[abmetadataGenerator] Invalid key', key)
|
||||
}
|
||||
}
|
||||
|
||||
const updatePayload = {} // Only updated key/values
|
||||
// Check update tags
|
||||
if (abmetadataData.tags) {
|
||||
if (checkArraysChanged(abmetadataData.tags, media.tags)) {
|
||||
updatePayload.tags = abmetadataData.tags
|
||||
}
|
||||
}
|
||||
|
||||
if (abmetadataData.chapters && mediaType === 'book') {
|
||||
const abmetadataChaptersCleaned = cleanChaptersArray(abmetadataData.chapters)
|
||||
if (abmetadataChaptersCleaned) {
|
||||
if (!areEquivalent(abmetadataChaptersCleaned, media.chapters)) {
|
||||
updatePayload.chapters = abmetadataChaptersCleaned
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (Object.keys(metadataUpdatePayload).length) {
|
||||
updatePayload.metadata = metadataUpdatePayload
|
||||
}
|
||||
|
||||
return updatePayload
|
||||
}
|
||||
module.exports.parseAndCheckForUpdates = parseAndCheckForUpdates
|
||||
|
|
|
|||
|
|
@ -65,6 +65,9 @@ module.exports.getId = (prepend = '') => {
|
|||
}
|
||||
|
||||
function elapsedPretty(seconds) {
|
||||
if (seconds > 0 && seconds < 1) {
|
||||
return `${Math.floor(seconds * 1000)} ms`
|
||||
}
|
||||
if (seconds < 60) {
|
||||
return `${Math.floor(seconds)} sec`
|
||||
}
|
||||
|
|
@ -166,4 +169,39 @@ module.exports.getTitleIgnorePrefix = (title) => {
|
|||
module.exports.getTitlePrefixAtEnd = (title) => {
|
||||
let [sort, prefix] = getTitleParts(title)
|
||||
return prefix ? `${sort}, ${prefix}` : title
|
||||
}
|
||||
|
||||
/**
|
||||
* to lower case for only ascii characters
|
||||
* used to handle sqlite that doesnt support unicode lower
|
||||
* @see https://github.com/advplyr/audiobookshelf/issues/2187
|
||||
*
|
||||
* @param {string} str
|
||||
* @returns {string}
|
||||
*/
|
||||
module.exports.asciiOnlyToLowerCase = (str) => {
|
||||
if (!str) return ''
|
||||
|
||||
let temp = ''
|
||||
for (let chars of str) {
|
||||
let value = chars.charCodeAt()
|
||||
if (value >= 65 && value <= 90) {
|
||||
temp += String.fromCharCode(value + 32)
|
||||
} else {
|
||||
temp += chars
|
||||
}
|
||||
}
|
||||
return temp
|
||||
}
|
||||
|
||||
/**
|
||||
* Escape string used in RegExp
|
||||
* @see https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions#escaping
|
||||
*
|
||||
* @param {string} str
|
||||
* @returns {string}
|
||||
*/
|
||||
module.exports.escapeRegExp = (str) => {
|
||||
if (typeof str !== 'string') return ''
|
||||
return str.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')
|
||||
}
|
||||
93
server/utils/migrations/absMetadataMigration.js
Normal file
93
server/utils/migrations/absMetadataMigration.js
Normal file
|
|
@ -0,0 +1,93 @@
|
|||
const Path = require('path')
|
||||
const Logger = require('../../Logger')
|
||||
const fsExtra = require('../../libs/fsExtra')
|
||||
const fileUtils = require('../fileUtils')
|
||||
const LibraryFile = require('../../objects/files/LibraryFile')
|
||||
|
||||
/**
|
||||
*
|
||||
* @param {import('../../models/LibraryItem')} libraryItem
|
||||
* @returns {Promise<boolean>} false if failed
|
||||
*/
|
||||
async function writeMetadataFileForItem(libraryItem) {
|
||||
const storeMetadataWithItem = global.ServerSettings.storeMetadataWithItem && !libraryItem.isFile
|
||||
const metadataPath = storeMetadataWithItem ? libraryItem.path : Path.join(global.MetadataPath, 'items', libraryItem.id)
|
||||
const metadataFilepath = fileUtils.filePathToPOSIX(Path.join(metadataPath, 'metadata.json'))
|
||||
if ((await fsExtra.pathExists(metadataFilepath))) {
|
||||
// Metadata file already exists do nothing
|
||||
return null
|
||||
}
|
||||
Logger.info(`[absMetadataMigration] metadata file not found at "${metadataFilepath}" - creating`)
|
||||
|
||||
if (!storeMetadataWithItem) {
|
||||
// Ensure /metadata/items/<lid> dir
|
||||
await fsExtra.ensureDir(metadataPath)
|
||||
}
|
||||
|
||||
const metadataJson = libraryItem.media.getAbsMetadataJson()
|
||||
|
||||
// Save to file
|
||||
const success = await fsExtra.writeFile(metadataFilepath, JSON.stringify(metadataJson, null, 2)).then(() => true).catch((error) => {
|
||||
Logger.error(`[absMetadataMigration] failed to save metadata file at "${metadataFilepath}"`, error.message || error)
|
||||
return false
|
||||
})
|
||||
|
||||
if (!success) return false
|
||||
if (!storeMetadataWithItem) return true // No need to do anything else
|
||||
|
||||
// Safety check to make sure library file with the same path isnt already there
|
||||
libraryItem.libraryFiles = libraryItem.libraryFiles.filter(lf => lf.metadata.path !== metadataFilepath)
|
||||
|
||||
// Put new library file in library item
|
||||
const newLibraryFile = new LibraryFile()
|
||||
await newLibraryFile.setDataFromPath(metadataFilepath, 'metadata.json')
|
||||
libraryItem.libraryFiles.push(newLibraryFile.toJSON())
|
||||
|
||||
// Update library item timestamps and total size
|
||||
const libraryItemDirTimestamps = await fileUtils.getFileTimestampsWithIno(libraryItem.path)
|
||||
if (libraryItemDirTimestamps) {
|
||||
libraryItem.mtime = libraryItemDirTimestamps.mtimeMs
|
||||
libraryItem.ctime = libraryItemDirTimestamps.ctimeMs
|
||||
let size = 0
|
||||
libraryItem.libraryFiles.forEach((lf) => size += (!isNaN(lf.metadata.size) ? Number(lf.metadata.size) : 0))
|
||||
libraryItem.size = size
|
||||
}
|
||||
|
||||
libraryItem.changed('libraryFiles', true)
|
||||
return libraryItem.save().then(() => true).catch((error) => {
|
||||
Logger.error(`[absMetadataMigration] failed to save libraryItem "${libraryItem.id}"`, error.message || error)
|
||||
return false
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @param {import('../../Database')} Database
|
||||
* @param {number} [offset=0]
|
||||
* @param {number} [totalCreated=0]
|
||||
*/
|
||||
async function runMigration(Database, offset = 0, totalCreated = 0) {
|
||||
const libraryItems = await Database.libraryItemModel.getLibraryItemsIncrement(offset, 500, { isMissing: false })
|
||||
if (!libraryItems.length) return totalCreated
|
||||
|
||||
let numCreated = 0
|
||||
for (const libraryItem of libraryItems) {
|
||||
const success = await writeMetadataFileForItem(libraryItem)
|
||||
if (success) numCreated++
|
||||
}
|
||||
|
||||
if (libraryItems.length < 500) {
|
||||
return totalCreated + numCreated
|
||||
}
|
||||
return runMigration(Database, offset + libraryItems.length, totalCreated + numCreated)
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @param {import('../../Database')} Database
|
||||
*/
|
||||
module.exports.migrate = async (Database) => {
|
||||
Logger.info(`[absMetadataMigration] Starting metadata.json migration`)
|
||||
const totalCreated = await runMigration(Database)
|
||||
Logger.info(`[absMetadataMigration] Finished metadata.json migration (${totalCreated} files created)`)
|
||||
}
|
||||
|
|
@ -1,13 +1,6 @@
|
|||
const xml2js = require('xml2js')
|
||||
const Logger = require('../../Logger')
|
||||
|
||||
// given a list of audio files, extract all of the Overdrive Media Markers metaTags, and return an array of them as XML
|
||||
function extractOverdriveMediaMarkers(includedAudioFiles) {
|
||||
Logger.debug('[parseOverdriveMediaMarkers] Extracting overdrive media markers')
|
||||
var markers = includedAudioFiles.map((af) => af.metaTags.tagOverdriveMediaMarker).filter(af => af) || []
|
||||
|
||||
return markers
|
||||
}
|
||||
|
||||
// given the array of Overdrive Media Markers from generateOverdriveMediaMarkers()
|
||||
// parse and clean them in to something a bit more usable
|
||||
function cleanOverdriveMediaMarkers(overdriveMediaMarkers) {
|
||||
|
|
@ -29,12 +22,11 @@ function cleanOverdriveMediaMarkers(overdriveMediaMarkers) {
|
|||
]
|
||||
*/
|
||||
|
||||
var parseString = require('xml2js').parseString // function to convert xml to JSON
|
||||
var parsedOverdriveMediaMarkers = []
|
||||
|
||||
const parsedOverdriveMediaMarkers = []
|
||||
overdriveMediaMarkers.forEach((item, index) => {
|
||||
var parsed_result = null
|
||||
parseString(item, function (err, result) {
|
||||
let parsed_result = null
|
||||
// convert xml to JSON
|
||||
xml2js.parseString(item, function (err, result) {
|
||||
/*
|
||||
result.Markers.Marker is the result of parsing the XML for the MediaMarker tags for the MP3 file (Part##.mp3)
|
||||
it is shaped like this, and needs further cleaning below:
|
||||
|
|
@ -54,7 +46,7 @@ function cleanOverdriveMediaMarkers(overdriveMediaMarkers) {
|
|||
*/
|
||||
|
||||
// The values for Name and Time in results.Markers.Marker are returned as Arrays from parseString and should be strings
|
||||
if (result && result.Markers && result.Markers.Marker) {
|
||||
if (result?.Markers?.Marker) {
|
||||
parsed_result = objectValuesArrayToString(result.Markers.Marker)
|
||||
}
|
||||
})
|
||||
|
|
@ -138,22 +130,13 @@ function generateParsedChapters(includedAudioFiles, cleanedOverdriveMediaMarkers
|
|||
return parsedChapters
|
||||
}
|
||||
|
||||
module.exports.overdriveMediaMarkersExist = (includedAudioFiles) => {
|
||||
return extractOverdriveMediaMarkers(includedAudioFiles).length > 1
|
||||
}
|
||||
|
||||
module.exports.parseOverdriveMediaMarkersAsChapters = (includedAudioFiles) => {
|
||||
Logger.info('[parseOverdriveMediaMarkers] Parsing of Overdrive Media Markers started')
|
||||
|
||||
var overdriveMediaMarkers = extractOverdriveMediaMarkers(includedAudioFiles)
|
||||
const overdriveMediaMarkers = includedAudioFiles.map((af) => af.metaTags.tagOverdriveMediaMarker).filter(af => af) || []
|
||||
if (!overdriveMediaMarkers.length) return null
|
||||
|
||||
var cleanedOverdriveMediaMarkers = cleanOverdriveMediaMarkers(overdriveMediaMarkers)
|
||||
// TODO: generateParsedChapters requires overdrive media markers and included audio files length to be the same
|
||||
// so if not equal then we must exit
|
||||
if (cleanedOverdriveMediaMarkers.length !== includedAudioFiles.length) return null
|
||||
|
||||
var parsedChapters = generateParsedChapters(includedAudioFiles, cleanedOverdriveMediaMarkers)
|
||||
|
||||
return parsedChapters
|
||||
return generateParsedChapters(includedAudioFiles, cleanedOverdriveMediaMarkers)
|
||||
}
|
||||
|
|
@ -4,7 +4,7 @@ const { xmlToJSON, levenshteinDistance } = require('./index')
|
|||
const htmlSanitizer = require('../utils/htmlSanitizer')
|
||||
|
||||
function extractFirstArrayItem(json, key) {
|
||||
if (!json[key] || !json[key].length) return null
|
||||
if (!json[key]?.length) return null
|
||||
return json[key][0]
|
||||
}
|
||||
|
||||
|
|
@ -66,7 +66,7 @@ function extractPodcastMetadata(channel) {
|
|||
arrayFields.forEach((key) => {
|
||||
const cleanKey = key.split(':').pop()
|
||||
let value = extractFirstArrayItem(channel, key)
|
||||
if (value && value['_']) value = value['_']
|
||||
if (value?.['_']) value = value['_']
|
||||
metadata[cleanKey] = value
|
||||
})
|
||||
return metadata
|
||||
|
|
@ -110,17 +110,30 @@ function extractEpisodeData(item) {
|
|||
const pubDate = extractFirstArrayItem(item, 'pubDate')
|
||||
if (typeof pubDate === 'string') {
|
||||
episode.pubDate = pubDate
|
||||
} else if (pubDate && typeof pubDate._ === 'string') {
|
||||
} else if (typeof pubDate?._ === 'string') {
|
||||
episode.pubDate = pubDate._
|
||||
} else {
|
||||
Logger.error(`[podcastUtils] Invalid pubDate ${item['pubDate']} for ${episode.enclosure.url}`)
|
||||
}
|
||||
}
|
||||
|
||||
if (item['guid']) {
|
||||
const guidItem = extractFirstArrayItem(item, 'guid')
|
||||
if (typeof guidItem === 'string') {
|
||||
episode.guid = guidItem
|
||||
} else if (typeof guidItem?._ === 'string') {
|
||||
episode.guid = guidItem._
|
||||
} else {
|
||||
Logger.error(`[podcastUtils] Invalid guid ${item['guid']} for ${episode.enclosure.url}`)
|
||||
}
|
||||
}
|
||||
|
||||
const arrayFields = ['title', 'itunes:episodeType', 'itunes:season', 'itunes:episode', 'itunes:author', 'itunes:duration', 'itunes:explicit', 'itunes:subtitle']
|
||||
arrayFields.forEach((key) => {
|
||||
const cleanKey = key.split(':').pop()
|
||||
episode[cleanKey] = extractFirstArrayItem(item, key)
|
||||
let value = extractFirstArrayItem(item, key)
|
||||
if (value?.['_']) value = value['_']
|
||||
episode[cleanKey] = value
|
||||
})
|
||||
return episode
|
||||
}
|
||||
|
|
@ -142,6 +155,7 @@ function cleanEpisodeData(data) {
|
|||
explicit: data.explicit || '',
|
||||
publishedAt,
|
||||
enclosure: data.enclosure,
|
||||
guid: data.guid || null,
|
||||
chaptersUrl: data.chaptersUrl || null,
|
||||
chaptersType: data.chaptersType || null
|
||||
}
|
||||
|
|
@ -159,16 +173,16 @@ function extractPodcastEpisodes(items) {
|
|||
}
|
||||
|
||||
function cleanPodcastJson(rssJson, excludeEpisodeMetadata) {
|
||||
if (!rssJson.channel || !rssJson.channel.length) {
|
||||
if (!rssJson.channel?.length) {
|
||||
Logger.error(`[podcastUtil] Invalid podcast no channel object`)
|
||||
return null
|
||||
}
|
||||
var channel = rssJson.channel[0]
|
||||
if (!channel.item || !channel.item.length) {
|
||||
const channel = rssJson.channel[0]
|
||||
if (!channel.item?.length) {
|
||||
Logger.error(`[podcastUtil] Invalid podcast no episodes`)
|
||||
return null
|
||||
}
|
||||
var podcast = {
|
||||
const podcast = {
|
||||
metadata: extractPodcastMetadata(channel)
|
||||
}
|
||||
if (!excludeEpisodeMetadata) {
|
||||
|
|
@ -181,8 +195,8 @@ function cleanPodcastJson(rssJson, excludeEpisodeMetadata) {
|
|||
|
||||
module.exports.parsePodcastRssFeedXml = async (xml, excludeEpisodeMetadata = false, includeRaw = false) => {
|
||||
if (!xml) return null
|
||||
var json = await xmlToJSON(xml)
|
||||
if (!json || !json.rss) {
|
||||
const json = await xmlToJSON(xml)
|
||||
if (!json?.rss) {
|
||||
Logger.error('[podcastUtils] Invalid XML or RSS feed')
|
||||
return null
|
||||
}
|
||||
|
|
@ -215,12 +229,12 @@ module.exports.getPodcastFeed = (feedUrl, excludeEpisodeMetadata = false) => {
|
|||
data.data = data.data.toString()
|
||||
}
|
||||
|
||||
if (!data || !data.data) {
|
||||
if (!data?.data) {
|
||||
Logger.error(`[podcastUtils] getPodcastFeed: Invalid podcast feed request response (${feedUrl})`)
|
||||
return false
|
||||
}
|
||||
Logger.debug(`[podcastUtils] getPodcastFeed for "${feedUrl}" success - parsing xml`)
|
||||
var payload = await this.parsePodcastRssFeedXml(data.data, excludeEpisodeMetadata)
|
||||
const payload = await this.parsePodcastRssFeedXml(data.data, excludeEpisodeMetadata)
|
||||
if (!payload) {
|
||||
return false
|
||||
}
|
||||
|
|
@ -246,7 +260,7 @@ module.exports.findMatchingEpisodes = async (feedUrl, searchTitle) => {
|
|||
|
||||
module.exports.findMatchingEpisodesInFeed = (feed, searchTitle) => {
|
||||
searchTitle = searchTitle.toLowerCase().trim()
|
||||
if (!feed || !feed.episodes) {
|
||||
if (!feed?.episodes) {
|
||||
return null
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -308,6 +308,8 @@ module.exports = {
|
|||
async getNewestAuthors(library, user, limit) {
|
||||
if (library.mediaType !== 'book') return { authors: [], count: 0 }
|
||||
|
||||
const { bookWhere, replacements } = libraryItemsBookFilters.getUserPermissionBookWhereQuery(user)
|
||||
|
||||
const { rows: authors, count } = await Database.authorModel.findAndCountAll({
|
||||
where: {
|
||||
libraryId: library.id,
|
||||
|
|
@ -315,9 +317,15 @@ module.exports = {
|
|||
[Sequelize.Op.gte]: new Date(new Date() - (60 * 24 * 60 * 60 * 1000)) // 60 days ago
|
||||
}
|
||||
},
|
||||
replacements,
|
||||
include: {
|
||||
model: Database.bookAuthorModel,
|
||||
required: true // Must belong to a book
|
||||
model: Database.bookModel,
|
||||
attributes: ['id', 'tags', 'explicit'],
|
||||
where: bookWhere,
|
||||
required: true, // Must belong to a book
|
||||
through: {
|
||||
attributes: []
|
||||
}
|
||||
},
|
||||
limit,
|
||||
distinct: true,
|
||||
|
|
@ -328,7 +336,7 @@ module.exports = {
|
|||
|
||||
return {
|
||||
authors: authors.map((au) => {
|
||||
const numBooks = au.bookAuthors?.length || 0
|
||||
const numBooks = au.books.length || 0
|
||||
return au.getOldAuthor().toJSONExpanded(numBooks)
|
||||
}),
|
||||
count
|
||||
|
|
|
|||
|
|
@ -2,6 +2,7 @@ const Sequelize = require('sequelize')
|
|||
const Database = require('../../Database')
|
||||
const Logger = require('../../Logger')
|
||||
const authorFilters = require('./authorFilters')
|
||||
const { asciiOnlyToLowerCase } = require('../index')
|
||||
|
||||
module.exports = {
|
||||
/**
|
||||
|
|
@ -1013,7 +1014,8 @@ module.exports = {
|
|||
let matchText = null
|
||||
let matchKey = null
|
||||
for (const key of ['title', 'subtitle', 'asin', 'isbn']) {
|
||||
if (book[key]?.toLowerCase().includes(query)) {
|
||||
const valueToLower = asciiOnlyToLowerCase(book[key])
|
||||
if (valueToLower.includes(query)) {
|
||||
matchText = book[key]
|
||||
matchKey = key
|
||||
break
|
||||
|
|
|
|||
|
|
@ -2,6 +2,7 @@
|
|||
const Sequelize = require('sequelize')
|
||||
const Database = require('../../Database')
|
||||
const Logger = require('../../Logger')
|
||||
const { asciiOnlyToLowerCase } = require('../index')
|
||||
|
||||
module.exports = {
|
||||
/**
|
||||
|
|
@ -364,7 +365,8 @@ module.exports = {
|
|||
let matchText = null
|
||||
let matchKey = null
|
||||
for (const key of ['title', 'author', 'itunesId', 'itunesArtistId']) {
|
||||
if (podcast[key]?.toLowerCase().includes(query)) {
|
||||
const valueToLower = asciiOnlyToLowerCase(podcast[key])
|
||||
if (valueToLower.includes(query)) {
|
||||
matchText = podcast[key]
|
||||
matchKey = key
|
||||
break
|
||||
|
|
|
|||
|
|
@ -2,6 +2,19 @@ const Path = require('path')
|
|||
const { filePathToPOSIX } = require('./fileUtils')
|
||||
const globals = require('./globals')
|
||||
const LibraryFile = require('../objects/files/LibraryFile')
|
||||
const parseNameString = require('./parsers/parseNameString')
|
||||
|
||||
/**
|
||||
* @typedef LibraryItemFilenameMetadata
|
||||
* @property {string} title
|
||||
* @property {string} subtitle Book mediaType only
|
||||
* @property {string} asin Book mediaType only
|
||||
* @property {string[]} authors Book mediaType only
|
||||
* @property {string[]} narrators Book mediaType only
|
||||
* @property {string} seriesName Book mediaType only
|
||||
* @property {string} seriesSequence Book mediaType only
|
||||
* @property {string} publishedYear Book mediaType only
|
||||
*/
|
||||
|
||||
function isMediaFile(mediaType, ext, audiobooksOnly = false) {
|
||||
if (!ext) return false
|
||||
|
|
@ -210,58 +223,71 @@ function buildLibraryFile(libraryItemPath, files) {
|
|||
}
|
||||
module.exports.buildLibraryFile = buildLibraryFile
|
||||
|
||||
// Input relative filepath, output all details that can be parsed
|
||||
function getBookDataFromDir(folderPath, relPath, parseSubtitle = false) {
|
||||
relPath = filePathToPOSIX(relPath)
|
||||
var splitDir = relPath.split('/')
|
||||
/**
|
||||
* Get details parsed from filenames
|
||||
*
|
||||
* @param {string} relPath
|
||||
* @param {boolean} parseSubtitle
|
||||
* @returns {LibraryItemFilenameMetadata}
|
||||
*/
|
||||
function getBookDataFromDir(relPath, parseSubtitle = false) {
|
||||
const splitDir = relPath.split('/')
|
||||
|
||||
var folder = splitDir.pop() // Audio files will always be in the directory named for the title
|
||||
series = (splitDir.length > 1) ? splitDir.pop() : null // If there are at least 2 more directories, next furthest will be the series
|
||||
author = (splitDir.length > 0) ? splitDir.pop() : null // There could be many more directories, but only the top 3 are used for naming /author/series/title/
|
||||
|
||||
// The may contain various other pieces of metadata, these functions extract it.
|
||||
var [folder, asin] = getASIN(folder)
|
||||
var [folder, narrators] = getNarrator(folder)
|
||||
var [folder, sequence] = series ? getSequence(folder) : [folder, null]
|
||||
var [folder, publishedYear] = getPublishedYear(folder)
|
||||
var [title, subtitle] = parseSubtitle ? getSubtitle(folder) : [folder, null]
|
||||
|
||||
|
||||
return {
|
||||
mediaMetadata: {
|
||||
author,
|
||||
title,
|
||||
subtitle,
|
||||
series,
|
||||
sequence,
|
||||
publishedYear,
|
||||
narrators,
|
||||
},
|
||||
relPath: relPath, // relative audiobook path i.e. /Author Name/Book Name/..
|
||||
path: Path.posix.join(folderPath, relPath) // i.e. /audiobook/Author Name/Book Name/..
|
||||
title,
|
||||
subtitle,
|
||||
asin,
|
||||
authors: parseNameString.parse(author)?.names || [],
|
||||
narrators: parseNameString.parse(narrators)?.names || [],
|
||||
seriesName: series,
|
||||
seriesSequence: sequence,
|
||||
publishedYear
|
||||
}
|
||||
}
|
||||
module.exports.getBookDataFromDir = getBookDataFromDir
|
||||
|
||||
/**
|
||||
* Extract narrator from folder name
|
||||
*
|
||||
* @param {string} folder
|
||||
* @returns {[string, string]} [folder, narrator]
|
||||
*/
|
||||
function getNarrator(folder) {
|
||||
let pattern = /^(?<title>.*) \{(?<narrators>.*)\}$/
|
||||
let match = folder.match(pattern)
|
||||
return match ? [match.groups.title, match.groups.narrators] : [folder, null]
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract series sequence from folder name
|
||||
*
|
||||
* @example
|
||||
* 'Book 2 - Title - Subtitle'
|
||||
* 'Title - Subtitle - Vol 12'
|
||||
* 'Title - volume 9 - Subtitle'
|
||||
* 'Vol. 3 Title Here - Subtitle'
|
||||
* '1980 - Book 2 - Title'
|
||||
* 'Volume 12. Title - Subtitle'
|
||||
* '100 - Book Title'
|
||||
* '6. Title'
|
||||
* '0.5 - Book Title'
|
||||
*
|
||||
* @param {string} folder
|
||||
* @returns {[string, string]} [folder, sequence]
|
||||
*/
|
||||
function getSequence(folder) {
|
||||
// Valid ways of including a volume number:
|
||||
// [
|
||||
// 'Book 2 - Title - Subtitle',
|
||||
// 'Title - Subtitle - Vol 12',
|
||||
// 'Title - volume 9 - Subtitle',
|
||||
// 'Vol. 3 Title Here - Subtitle',
|
||||
// '1980 - Book 2 - Title',
|
||||
// 'Volume 12. Title - Subtitle',
|
||||
// '100 - Book Title',
|
||||
// '2 - Book Title',
|
||||
// '6. Title',
|
||||
// '0.5 - Book Title'
|
||||
// ]
|
||||
|
||||
// Matches a valid volume string. Also matches a book whose title starts with a 1 to 3 digit number. Will handle that later.
|
||||
let pattern = /^(?<volumeLabel>vol\.? |volume |book )?(?<sequence>\d{0,3}(?:\.\d{1,2})?)(?<trailingDot>\.?)(?: (?<suffix>.*))?$/i
|
||||
|
||||
|
|
@ -282,6 +308,12 @@ function getSequence(folder) {
|
|||
return [folder, volumeNumber]
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract published year from folder name
|
||||
*
|
||||
* @param {string} folder
|
||||
* @returns {[string, string]} [folder, publishedYear]
|
||||
*/
|
||||
function getPublishedYear(folder) {
|
||||
var publishedYear = null
|
||||
|
||||
|
|
@ -295,34 +327,73 @@ function getPublishedYear(folder) {
|
|||
return [folder, publishedYear]
|
||||
}
|
||||
|
||||
/**
 * Extract subtitle from folder name
 *
 * Everything after the first " - " separator is treated as the subtitle;
 * additional separators remain part of the subtitle.
 *
 * @param {string} folder
 * @returns {[string, string]} [folder, subtitle] — subtitle is '' when no separator exists
 */
function getSubtitle(folder) {
  const [title, ...subtitleParts] = folder.split(' - ')
  return [title, subtitleParts.join(' - ')]
}
|
||||
|
||||
function getPodcastDataFromDir(folderPath, relPath) {
|
||||
relPath = filePathToPOSIX(relPath)
|
||||
/**
 * Extract asin from folder name
 *
 * Matches a bracketed 10-character ASIN delimited by spaces or the string
 * boundaries, e.g. "[B0015T963C]", and strips it from the folder name.
 *
 * @param {string} folder
 * @returns {[string, string]} [folder, asin] — asin is null when none is found
 */
function getASIN(folder) {
  const asinPattern = /(?: |^)\[([A-Z0-9]{10})](?= |$)/ // Matches "[B0015T963C]"
  const found = asinPattern.exec(folder)
  if (!found) {
    return [folder.trim(), null]
  }
  const cleaned = folder.replace(found[0], '')
  return [cleaned.trim(), found[1]]
}
|
||||
|
||||
/**
 * Extract podcast metadata from the directory path
 *
 * @param {string} relPath - relative podcast path i.e. 'Podcast Name/..' (POSIX separators)
 * @returns {LibraryItemFilenameMetadata}
 */
function getPodcastDataFromDir(relPath) {
  const splitDir = relPath.split('/')

  // Audio files will always be in the directory named for the title
  const title = splitDir.pop()
  return {
    title
  }
}
|
||||
|
||||
/**
 * Build library item data (metadata parsed from the filename plus paths)
 * for a media directory.
 *
 * @param {string} libraryMediaType - 'podcast' or 'book'
 * @param {string} folderPath - absolute library folder path
 * @param {string} relPath - item path relative to the library folder
 * @returns {{ mediaMetadata: LibraryItemFilenameMetadata, relPath: string, path: string }}
 */
function getDataFromMediaDir(libraryMediaType, folderPath, relPath) {
  relPath = filePathToPOSIX(relPath)
  let fullPath = Path.posix.join(folderPath, relPath)
  let mediaMetadata = null

  if (libraryMediaType === 'podcast') {
    mediaMetadata = getPodcastDataFromDir(relPath)
  } else {
    // book
    mediaMetadata = getBookDataFromDir(relPath, !!global.ServerSettings.scannerParseSubtitle)
  }

  return {
    mediaMetadata,
    relPath,
    path: fullPath
  }
}
module.exports.getDataFromMediaDir = getDataFromMediaDir
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue