Mirror of https://github.com/advplyr/audiobookshelf.git, synced 2025-12-19 10:19:37 +00:00

Merge branch 'master' into liaocl
This commit is contained in: commit 2ec52a7a45
119 changed files with 11524 additions and 25894 deletions

@@ -8,7 +8,6 @@ const ExtractJwt = require('passport-jwt').ExtractJwt
 const OpenIDClient = require('openid-client')
 const Database = require('./Database')
 const Logger = require('./Logger')
-const e = require('express')
 
 /**
  * @class Class for handling all the authentication related functionality.

@@ -132,6 +132,11 @@ class Database {
     return this.models.playbackSession
   }
 
+  /** @type {typeof import('./models/CustomMetadataProvider')} */
+  get customMetadataProviderModel() {
+    return this.models.customMetadataProvider
+  }
+
   /**
    * Check if db file exists
    * @returns {boolean}

@@ -177,11 +182,11 @@ class Database {
     if (process.env.QUERY_LOGGING === "log") {
       // Setting QUERY_LOGGING=log will log all Sequelize queries before they run
       Logger.info(`[Database] Query logging enabled`)
-      logging = (query) => Logger.dev(`Running the following query:\n ${query}`)
+      logging = (query) => Logger.debug(`Running the following query:\n ${query}`)
     } else if (process.env.QUERY_LOGGING === "benchmark") {
       // Setting QUERY_LOGGING=benchmark will log all Sequelize queries and their execution times, after they run
       Logger.info(`[Database] Query benchmarking enabled"`)
-      logging = (query, time) => Logger.dev(`Ran the following query in ${time}ms:\n ${query}`)
+      logging = (query, time) => Logger.debug(`Ran the following query in ${time}ms:\n ${query}`)
       benchmark = true
     }
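Note: moving these callbacks from Logger.dev to Logger.debug routes query output through the level-gated log pipeline instead of the dev-only console shortcut. For context, the logging/benchmark values built here feed the standard Sequelize constructor options; a minimal sketch, with illustrative connection settings:

    const { Sequelize } = require('sequelize')

    // Sequelize invokes `logging` with the SQL string, and also passes the
    // elapsed milliseconds when `benchmark: true` is set.
    const sequelize = new Sequelize({
      dialect: 'sqlite',
      storage: ':memory:',
      logging: (query, time) => console.log(`Ran the following query in ${time}ms:\n ${query}`),
      benchmark: true
    })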

@@ -245,6 +250,7 @@ class Database {
     require('./models/Feed').init(this.sequelize)
     require('./models/FeedEpisode').init(this.sequelize)
     require('./models/Setting').init(this.sequelize)
+    require('./models/CustomMetadataProvider').init(this.sequelize)
 
     return this.sequelize.sync({ force, alter: false })
   }

@@ -3,14 +3,17 @@ const { LogLevel } = require('./utils/constants')
 
 class Logger {
   constructor() {
+    /** @type {import('./managers/LogManager')} */
+    this.logManager = null
+
     this.isDev = process.env.NODE_ENV !== 'production'
     this.logLevel = !this.isDev ? LogLevel.INFO : LogLevel.TRACE
     this.hideDevLogs = process.env.HIDE_DEV_LOGS === undefined ? !this.isDev : process.env.HIDE_DEV_LOGS === '1'
     this.socketListeners = []
-
-    this.logManager = null
   }
 
+  /**
+   * @returns {string}
+   */
   get timestamp() {
     return date.format(new Date(), 'YYYY-MM-DD HH:mm:ss.SSS')
   }

@@ -24,6 +27,9 @@ class Logger {
     return 'UNKNOWN'
   }
 
+  /**
+   * @returns {string}
+   */
   get source() {
     try {
       throw new Error()

@@ -63,7 +69,12 @@ class Logger {
     this.socketListeners = this.socketListeners.filter(s => s.id !== socketId)
   }
 
-  handleLog(level, args) {
+  /**
+   *
+   * @param {number} level
+   * @param {string[]} args
+   */
+  async handleLog(level, args) {
     const logObj = {
       timestamp: this.timestamp,
       source: this.source,

@@ -72,15 +83,17 @@ class Logger {
       level
     }
 
-    if (level >= this.logLevel && this.logManager) {
-      this.logManager.logToFile(logObj)
-    }
-
+    // Emit log to sockets that are listening to log events
     this.socketListeners.forEach((socketListener) => {
       if (socketListener.level <= level) {
        socketListener.socket.emit('log', logObj)
      }
    })
+
+    // Save log to file
+    if (level >= this.logLevel) {
+      await this.logManager.logToFile(logObj)
+    }
   }
 
   setLogLevel(level) {
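Note: the socket emit is gated the opposite way from the file write: a listener receives an event when its registered level is at or below the event's level. A self-contained sketch of that filter (the listener objects are illustrative):

    // Listeners registered at level 2 receive level-3 events; listeners at level 4 do not.
    const listeners = [
      { id: 'a', level: 2, socket: { emit: (ev, log) => console.log('a received', log.message) } },
      { id: 'b', level: 4, socket: { emit: (ev, log) => console.log('b received', log.message) } }
    ]
    const logObj = { level: 3, message: 'library scan started' }
    listeners.forEach((listener) => {
      if (listener.level <= logObj.level) listener.socket.emit('log', logObj)
    })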

@@ -88,15 +101,6 @@ class Logger {
     this.debug(`Set Log Level to ${this.levelString}`)
   }
 
-  /**
-   * Only to console and only for development
-   * @param {...any} args
-   */
-  dev(...args) {
-    if (this.hideDevLogs) return
-    console.log(`[${this.timestamp}] DEV:`, ...args)
-  }
-
   trace(...args) {
     if (this.logLevel > LogLevel.TRACE) return
     console.trace(`[${this.timestamp}] TRACE:`, ...args)

@@ -127,9 +131,15 @@ class Logger {
     this.handleLog(LogLevel.ERROR, args)
   }
 
+  /**
+   * Fatal errors are ones that exit the process
+   * Fatal logs are saved to crash_logs.txt
+   *
+   * @param {...any} args
+   */
   fatal(...args) {
     console.error(`[${this.timestamp}] FATAL:`, ...args, `(${this.source})`)
-    this.handleLog(LogLevel.FATAL, args)
+    return this.handleLog(LogLevel.FATAL, args)
   }
 
   note(...args) {
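Note: returning the handleLog promise is what lets the new process event handlers in Server.js await Logger.fatal(...) so the crash entry is flushed to disk before process.exit. A reduced sketch of the pattern (file path and class are illustrative):

    const fs = require('fs/promises')

    class TinyLogger {
      // Returning the write promise makes the fatal log awaitable by crash handlers.
      fatal(...args) {
        console.error('FATAL:', ...args)
        return fs.appendFile('/tmp/crash_logs.txt', args.join(' ') + '\n')
      }
    }

    process.on('uncaughtExceptionMonitor', async (error, origin) => {
      await new TinyLogger().fatal(`origin: ${origin}`, error.message)
    })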

@@ -2,6 +2,7 @@ const Path = require('path')
 const Sequelize = require('sequelize')
 const express = require('express')
 const http = require('http')
+const util = require('util')
 const fs = require('./libs/fsExtra')
 const fileUpload = require('./libs/expressFileupload')
 const rateLimit = require('./libs/expressRateLimit')

@@ -21,11 +22,11 @@ const SocketAuthority = require('./SocketAuthority')
 const ApiRouter = require('./routers/ApiRouter')
 const HlsRouter = require('./routers/HlsRouter')
 
+const LogManager = require('./managers/LogManager')
 const NotificationManager = require('./managers/NotificationManager')
 const EmailManager = require('./managers/EmailManager')
 const AbMergeManager = require('./managers/AbMergeManager')
 const CacheManager = require('./managers/CacheManager')
-const LogManager = require('./managers/LogManager')
 const BackupManager = require('./managers/BackupManager')
 const PlaybackSessionManager = require('./managers/PlaybackSessionManager')
 const PodcastManager = require('./managers/PodcastManager')

@@ -67,7 +68,6 @@ class Server {
     this.notificationManager = new NotificationManager()
     this.emailManager = new EmailManager()
     this.backupManager = new BackupManager()
-    this.logManager = new LogManager()
     this.abMergeManager = new AbMergeManager()
     this.playbackSessionManager = new PlaybackSessionManager()
     this.podcastManager = new PodcastManager(this.watcher, this.notificationManager)

@@ -81,7 +81,7 @@ class Server {
     this.apiRouter = new ApiRouter(this)
     this.hlsRouter = new HlsRouter(this.auth, this.playbackSessionManager)
 
-    Logger.logManager = this.logManager
+    Logger.logManager = new LogManager()
 
     this.server = null
     this.io = null

@@ -102,10 +102,13 @@ class Server {
    */
   async init() {
     Logger.info('[Server] Init v' + version)
 
     await this.playbackSessionManager.removeOrphanStreams()
 
     await Database.init(false)
+
+    await Logger.logManager.init()
 
     // Create token secret if does not exist (Added v2.1.0)
     if (!Database.serverSettings.tokenSecret) {
       await this.auth.initTokenSecret()

@@ -115,7 +118,6 @@ class Server {
     await CacheManager.ensureCachePaths()
 
     await this.backupManager.init()
-    await this.logManager.init()
     await this.rssFeedManager.init()
 
     const libraries = await Database.libraryModel.getAllOldLibraries()

@@ -135,8 +137,41 @@ class Server {
     }
   }
 
+  /**
+   * Listen for SIGINT and uncaught exceptions
+   */
+  initProcessEventListeners() {
+    let sigintAlreadyReceived = false
+    process.on('SIGINT', async () => {
+      if (!sigintAlreadyReceived) {
+        sigintAlreadyReceived = true
+        Logger.info('SIGINT (Ctrl+C) received. Shutting down...')
+        await this.stop()
+        Logger.info('Server stopped. Exiting.')
+      } else {
+        Logger.info('SIGINT (Ctrl+C) received again. Exiting immediately.')
+      }
+      process.exit(0)
+    })
+
+    /**
+     * @see https://nodejs.org/api/process.html#event-uncaughtexceptionmonitor
+     */
+    process.on('uncaughtExceptionMonitor', async (error, origin) => {
+      await Logger.fatal(`[Server] Uncaught exception origin: ${origin}, error:`, util.format('%O', error))
+    })
+    /**
+     * @see https://nodejs.org/api/process.html#event-unhandledrejection
+     */
+    process.on('unhandledRejection', async (reason, promise) => {
+      await Logger.fatal(`[Server] Unhandled rejection: ${reason}, promise:`, util.format('%O', promise))
+      process.exit(1)
+    })
+  }
+
   async start() {
     Logger.info('=== Starting Server ===')
+    this.initProcessEventListeners()
     await this.init()
 
     const app = express()
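Note: the sigintAlreadyReceived flag implements the usual double-Ctrl+C convention: the first signal runs a graceful shutdown, a second one exits immediately. Condensed to a standalone sketch (the cleanup body is illustrative):

    const shutdownGracefully = async () => { /* close server, flush logs */ }

    let sigintReceived = false
    process.on('SIGINT', async () => {
      if (!sigintReceived) {
        sigintReceived = true
        await shutdownGracefully()
      }
      process.exit(0)
    })

Also worth noting: uncaughtExceptionMonitor observes the exception without preventing the default crash behavior, which is why that handler only logs and does not call process.exit itself.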

@@ -284,19 +319,6 @@ class Server {
     })
     app.get('/healthcheck', (req, res) => res.sendStatus(200))
 
-    let sigintAlreadyReceived = false
-    process.on('SIGINT', async () => {
-      if (!sigintAlreadyReceived) {
-        sigintAlreadyReceived = true
-        Logger.info('SIGINT (Ctrl+C) received. Shutting down...')
-        await this.stop()
-        Logger.info('Server stopped. Exiting.')
-      } else {
-        Logger.info('SIGINT (Ctrl+C) received again. Exiting immediately.')
-      }
-      process.exit(0)
-    })
-
     this.server.listen(this.Port, this.Host, () => {
       if (this.Host) Logger.info(`Listening on http://${this.Host}:${this.Port}`)
       else Logger.info(`Listening on port :${this.Port}`)

@@ -116,7 +116,6 @@ class SocketAuthority {
     // Logs
     socket.on('set_log_listener', (level) => Logger.addSocketListener(socket, level))
     socket.on('remove_log_listener', () => Logger.removeSocketListener(socket.id))
-    socket.on('fetch_daily_logs', () => this.Server.logManager.socketRequestDailyLogs(socket))
 
     // Sent automatically from socket.io clients
     socket.on('disconnect', (reason) => {

server/controllers/CustomMetadataProviderController.js (new file)
@@ -0,0 +1,117 @@
const Logger = require('../Logger')
const SocketAuthority = require('../SocketAuthority')
const Database = require('../Database')

const { validateUrl } = require('../utils/index')

//
// This is a controller for routes that don't have a home yet :(
//
class CustomMetadataProviderController {
  constructor() { }

  /**
   * GET: /api/custom-metadata-providers
   *
   * @param {import('express').Request} req
   * @param {import('express').Response} res
   */
  async getAll(req, res) {
    const providers = await Database.customMetadataProviderModel.findAll()

    res.json({
      providers
    })
  }

  /**
   * POST: /api/custom-metadata-providers
   *
   * @param {import('express').Request} req
   * @param {import('express').Response} res
   */
  async create(req, res) {
    const { name, url, mediaType, authHeaderValue } = req.body

    if (!name || !url || !mediaType) {
      return res.status(400).send('Invalid request body')
    }

    const validUrl = validateUrl(url)
    if (!validUrl) {
      Logger.error(`[CustomMetadataProviderController] Invalid url "${url}"`)
      return res.status(400).send('Invalid url')
    }

    const provider = await Database.customMetadataProviderModel.create({
      name,
      mediaType,
      url,
      authHeaderValue: !authHeaderValue ? null : authHeaderValue,
    })

    // TODO: Necessary to emit to all clients?
    SocketAuthority.emitter('custom_metadata_provider_added', provider.toClientJson())

    res.json({
      provider
    })
  }

  /**
   * DELETE: /api/custom-metadata-providers/:id
   *
   * @param {import('express').Request} req
   * @param {import('express').Response} res
   */
  async delete(req, res) {
    const slug = `custom-${req.params.id}`

    /** @type {import('../models/CustomMetadataProvider')} */
    const provider = req.customMetadataProvider
    const providerClientJson = provider.toClientJson()

    const fallbackProvider = provider.mediaType === 'book' ? 'google' : 'itunes'

    await provider.destroy()

    // Libraries using this provider fallback to default provider
    await Database.libraryModel.update({
      provider: fallbackProvider
    }, {
      where: {
        provider: slug
      }
    })

    // TODO: Necessary to emit to all clients?
    SocketAuthority.emitter('custom_metadata_provider_removed', providerClientJson)

    res.sendStatus(200)
  }

  /**
   * Middleware that requires admin or up
   *
   * @param {import('express').Request} req
   * @param {import('express').Response} res
   * @param {import('express').NextFunction} next
   */
  async middleware(req, res, next) {
    if (!req.user.isAdminOrUp) {
      Logger.warn(`[CustomMetadataProviderController] Non-admin user "${req.user.username}" attempted access route "${req.path}"`)
      return res.sendStatus(403)
    }

    // If id param then add req.customMetadataProvider
    if (req.params.id) {
      req.customMetadataProvider = await Database.customMetadataProviderModel.findByPk(req.params.id)
      if (!req.customMetadataProvider) {
        return res.sendStatus(404)
      }
    }

    next()
  }
}
module.exports = new CustomMetadataProviderController()
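Note: the ApiRouter wiring for this controller is not part of this excerpt; a sketch of how such a controller is typically mounted, with assumed paths:

    const express = require('express')
    const CustomMetadataProviderController = require('./controllers/CustomMetadataProviderController')

    const router = express.Router()
    const c = CustomMetadataProviderController
    // The shared middleware enforces admin access and loads req.customMetadataProvider for :id routes.
    router.get('/custom-metadata-providers', c.middleware.bind(c), c.getAll.bind(c))
    router.post('/custom-metadata-providers', c.middleware.bind(c), c.create.bind(c))
    router.delete('/custom-metadata-providers/:id', c.middleware.bind(c), c.delete.bind(c))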

@@ -33,6 +33,14 @@ class LibraryController {
       return res.status(500).send('Invalid request')
     }
 
+    // Validate that the custom provider exists if given any
+    if (newLibraryPayload.provider?.startsWith('custom-')) {
+      if (!await Database.customMetadataProviderModel.checkExistsBySlug(newLibraryPayload.provider)) {
+        Logger.error(`[LibraryController] Custom metadata provider "${newLibraryPayload.provider}" does not exist`)
+        return res.status(400).send('Custom metadata provider does not exist')
+      }
+    }
+
     // Validate folder paths exist or can be created & resolve rel paths
     // returns 400 if a folder fails to access
     newLibraryPayload.folders = newLibraryPayload.folders.map(f => {

@@ -86,19 +94,27 @@ class LibraryController {
     })
   }
 
+  /**
+   * GET: /api/libraries/:id
+   *
+   * @param {import('express').Request} req
+   * @param {import('express').Response} res
+   */
   async findOne(req, res) {
     const includeArray = (req.query.include || '').split(',')
     if (includeArray.includes('filterdata')) {
       const filterdata = await libraryFilters.getFilterData(req.library.mediaType, req.library.id)
+      const customMetadataProviders = await Database.customMetadataProviderModel.getForClientByMediaType(req.library.mediaType)
+
       return res.json({
         filterdata,
         issues: filterdata.numIssues,
         numUserPlaylists: await Database.playlistModel.getNumPlaylistsForUserAndLibrary(req.user.id, req.library.id),
+        customMetadataProviders,
         library: req.library
       })
     }
-    return res.json(req.library)
+    res.json(req.library)
   }
 
   /**

@@ -115,6 +131,14 @@ class LibraryController {
   async update(req, res) {
     const library = req.library
 
+    // Validate that the custom provider exists if given any
+    if (req.body.provider?.startsWith('custom-')) {
+      if (!await Database.customMetadataProviderModel.checkExistsBySlug(req.body.provider)) {
+        Logger.error(`[LibraryController] Custom metadata provider "${req.body.provider}" does not exist`)
+        return res.status(400).send('Custom metadata provider does not exist')
+      }
+    }
+
     // Validate new folder paths exist or can be created & resolve rel paths
     // returns 400 if a new folder fails to access
     if (req.body.folders) {

@@ -124,11 +124,6 @@ class LibraryItemController {
     const libraryItem = req.libraryItem
     const mediaPayload = req.body
 
-    // Item has cover and update is removing cover so purge it from cache
-    if (libraryItem.media.coverPath && (mediaPayload.coverPath === '' || mediaPayload.coverPath === null)) {
-      await CacheManager.purgeCoverCache(libraryItem.id)
-    }
-
     // Book specific
     if (libraryItem.isBook) {
       await this.createAuthorsAndSeriesForItemUpdate(mediaPayload, libraryItem.libraryId)

@@ -336,7 +336,7 @@ class MeController {
   }
 
   /**
-   * GET: /api/stats/year/:year
+   * GET: /api/me/stats/year/:year
    *
    * @param {import('express').Request} req
    * @param {import('express').Response} res

@@ -699,7 +699,7 @@ class MiscController {
   }
 
   /**
-   * GET: /api/me/stats/year/:year
+   * GET: /api/stats/year/:year
    *
    * @param {import('express').Request} req
    * @param {import('express').Response} res

@@ -717,5 +717,23 @@ class MiscController {
     const stats = await adminStats.getStatsForYear(year)
     res.json(stats)
   }
+
+  /**
+   * GET: /api/logger-data
+   * admin or up
+   *
+   * @param {import('express').Request} req
+   * @param {import('express').Response} res
+   */
+  async getLoggerData(req, res) {
+    if (!req.user.isAdminOrUp) {
+      Logger.error(`[MiscController] Non-admin user "${req.user.username}" attempted to get logger data`)
+      return res.sendStatus(403)
+    }
+
+    res.json({
+      currentDailyLogs: Logger.logManager.getMostRecentCurrentDailyLogs()
+    })
+  }
 }
 module.exports = new MiscController()
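Note: a sketch of an admin client hitting the new endpoint (token handling is illustrative):

    async function fetchLoggerData(userToken) {
      const response = await fetch('/api/logger-data', {
        headers: { Authorization: `Bearer ${userToken}` }
      })
      if (!response.ok) throw new Error(`Request failed: ${response.status}`) // 403 for non-admins
      const { currentDailyLogs } = await response.json()
      return currentDailyLogs
    }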

@@ -161,7 +161,7 @@ class SessionController {
    * @typedef batchDeleteReqBody
    * @property {string[]} sessions
    *
-   * @param {import('express').Request<{}, {}, batchDeleteReqBody, {}} req
+   * @param {import('express').Request<{}, {}, batchDeleteReqBody, {}>} req
    * @param {import('express').Response} res
    */
   async batchDelete(req, res) {

@@ -5,6 +5,7 @@ const iTunes = require('../providers/iTunes')
 const Audnexus = require('../providers/Audnexus')
 const FantLab = require('../providers/FantLab')
 const AudiobookCovers = require('../providers/AudiobookCovers')
+const CustomProviderAdapter = require('../providers/CustomProviderAdapter')
 const Logger = require('../Logger')
 const { levenshteinDistance, escapeRegExp } = require('../utils/index')
 

@@ -17,6 +18,7 @@ class BookFinder {
     this.audnexus = new Audnexus()
     this.fantLab = new FantLab()
     this.audiobookCovers = new AudiobookCovers()
+    this.customProviderAdapter = new CustomProviderAdapter()
 
     this.providers = ['google', 'itunes', 'openlibrary', 'fantlab', 'audiobookcovers', 'audible', 'audible.ca', 'audible.uk', 'audible.au', 'audible.fr', 'audible.de', 'audible.jp', 'audible.it', 'audible.in', 'audible.es']

@@ -147,6 +149,20 @@ class BookFinder {
     return books
   }
 
+  /**
+   *
+   * @param {string} title
+   * @param {string} author
+   * @param {string} providerSlug
+   * @returns {Promise<Object[]>}
+   */
+  async getCustomProviderResults(title, author, providerSlug) {
+    const books = await this.customProviderAdapter.search(title, author, providerSlug, 'book')
+    if (this.verbose) Logger.debug(`Custom provider '${providerSlug}' Search Results: ${books.length || 0}`)
+
+    return books
+  }
+
   static TitleCandidates = class {
 
     constructor(cleanAuthor) {

@@ -315,6 +331,11 @@ class BookFinder {
     const maxFuzzySearches = !isNaN(options.maxFuzzySearches) ? Number(options.maxFuzzySearches) : 5
     let numFuzzySearches = 0
 
+    // Custom providers are assumed to be correct
+    if (provider.startsWith('custom-')) {
+      return this.getCustomProviderResults(title, author, provider)
+    }
+
     if (!title)
       return books
 
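Note: because custom providers short-circuit here, their results never pass through the title/author candidate generation and fuzzy retries below. The dispatch shape, reduced to a sketch (the function names are stand-ins):

    async function searchBooks(provider, title, author) {
      if (provider.startsWith('custom-')) {
        // Custom providers are trusted as-is; no fuzzy-search fallback.
        return getCustomProviderResults(title, author, provider)
      }
      return runFuzzySearchPipeline(provider, title, author)
    }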

@@ -397,8 +418,7 @@ class BookFinder {
       books = await this.getFantLabResults(title, author)
     } else if (provider === 'audiobookcovers') {
       books = await this.getAudiobookCoversResults(title)
-    }
-    else {
+    } else {
       books = await this.getGoogleBooksResults(title, author)
     }
     return books

server/libs/libarchive/LICENSE (new file)
@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2018 ნიკა

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

server/libs/libarchive/archive.js (new file)
@@ -0,0 +1,262 @@
/**
 * Modified from https://github.com/nika-begiashvili/libarchivejs
 */

const Path = require('path')
const { Worker } = require('worker_threads')

/**
 * Represents compressed file before extraction
 */
class CompressedFile {

  constructor(name, size, path, archiveRef) {
    this._name = name
    this._size = size
    this._path = path
    this._archiveRef = archiveRef
  }

  /**
   * file name
   */
  get name() {
    return this._name
  }
  /**
   * file size
   */
  get size() {
    return this._size
  }

  /**
   * Extract file from archive
   * @returns {Promise<File>} extracted file
   */
  extract() {
    return this._archiveRef.extractSingleFile(this._path)
  }

}

class Archive {
  /**
   * Creates new archive instance from browser native File object
   * @param {Buffer} fileBuffer
   * @param {object} options
   * @returns {Archive}
   */
  static open(fileBuffer) {
    const arch = new Archive(fileBuffer, { workerUrl: Path.join(__dirname, 'libarchiveWorker.js') })
    return arch.open()
  }

  /**
   * Create new archive
   * @param {File} file
   * @param {Object} options
   */
  constructor(file, options) {
    this._worker = new Worker(options.workerUrl)
    this._worker.on('message', this._workerMsg.bind(this))

    this._callbacks = []
    this._content = {}
    this._processed = 0
    this._file = file
  }

  /**
   * Prepares file for reading
   * @returns {Promise<Archive>} archive instance
   */
  async open() {
    await this._postMessage({ type: 'HELLO' }, (resolve, reject, msg) => {
      if (msg.type === 'READY') {
        resolve()
      }
    })
    return await this._postMessage({ type: 'OPEN', file: this._file }, (resolve, reject, msg) => {
      if (msg.type === 'OPENED') {
        resolve(this)
      }
    })
  }

  /**
   * Terminate worker to free up memory
   */
  close() {
    this._worker.terminate()
    this._worker = null
  }

  /**
   * detect if archive has encrypted data
   * @returns {boolean|null} null if could not be determined
   */
  hasEncryptedData() {
    return this._postMessage({ type: 'CHECK_ENCRYPTION' },
      (resolve, reject, msg) => {
        if (msg.type === 'ENCRYPTION_STATUS') {
          resolve(msg.status)
        }
      }
    )
  }

  /**
   * set password to be used when reading archive
   */
  usePassword(archivePassword) {
    return this._postMessage({ type: 'SET_PASSPHRASE', passphrase: archivePassword },
      (resolve, reject, msg) => {
        if (msg.type === 'PASSPHRASE_STATUS') {
          resolve(msg.status)
        }
      }
    )
  }

  /**
   * Returns object containing directory structure and file information
   * @returns {Promise<object>}
   */
  getFilesObject() {
    if (this._processed > 0) {
      return Promise.resolve().then(() => this._content)
    }
    return this._postMessage({ type: 'LIST_FILES' }, (resolve, reject, msg) => {
      if (msg.type === 'ENTRY') {
        const entry = msg.entry
        const [target, prop] = this._getProp(this._content, entry.path)
        if (entry.type === 'FILE') {
          target[prop] = new CompressedFile(entry.fileName, entry.size, entry.path, this)
        }
        return true
      } else if (msg.type === 'END') {
        this._processed = 1
        resolve(this._cloneContent(this._content))
      }
    })
  }

  getFilesArray() {
    return this.getFilesObject().then((obj) => {
      return this._objectToArray(obj)
    })
  }

  extractSingleFile(target) {
    // Prevent extraction if worker already terminated
    if (this._worker === null) {
      throw new Error("Archive already closed")
    }

    return this._postMessage({ type: 'EXTRACT_SINGLE_FILE', target: target },
      (resolve, reject, msg) => {
        if (msg.type === 'FILE') {
          resolve(msg.entry)
        }
      }
    )
  }

  /**
   * Returns object containing directory structure and extracted File objects
   * @param {Function} extractCallback
   *
   */
  extractFiles(extractCallback) {
    if (this._processed > 1) {
      return Promise.resolve().then(() => this._content)
    }
    return this._postMessage({ type: 'EXTRACT_FILES' }, (resolve, reject, msg) => {
      if (msg.type === 'ENTRY') {
        const [target, prop] = this._getProp(this._content, msg.entry.path)
        if (msg.entry.type === 'FILE') {
          target[prop] = msg.entry
          if (extractCallback !== undefined) {
            setTimeout(extractCallback.bind(null, {
              file: target[prop],
              path: msg.entry.path,
            }))
          }
        }
        return true
      } else if (msg.type === 'END') {
        this._processed = 2
        this._worker.terminate()
        resolve(this._cloneContent(this._content))
      }
    })
  }

  _cloneContent(obj) {
    if (obj instanceof CompressedFile || obj === null) return obj
    const o = {}
    for (const prop of Object.keys(obj)) {
      o[prop] = this._cloneContent(obj[prop])
    }
    return o
  }

  _objectToArray(obj, path = '') {
    const files = []
    for (const key of Object.keys(obj)) {
      if (obj[key] instanceof CompressedFile || obj[key] === null) {
        files.push({
          file: obj[key] || key,
          path: path
        })
      } else {
        files.push(...this._objectToArray(obj[key], `${path}${key}/`))
      }
    }
    return files
  }

  _getProp(obj, path) {
    const parts = path.split('/')
    if (parts[parts.length - 1] === '') parts.pop()
    let cur = obj, prev = null
    for (const part of parts) {
      cur[part] = cur[part] || {}
      prev = cur
      cur = cur[part]
    }
    return [prev, parts[parts.length - 1]]
  }

  _postMessage(msg, callback) {
    this._worker.postMessage(msg)
    return new Promise((resolve, reject) => {
      this._callbacks.push(this._msgHandler.bind(this, callback, resolve, reject))
    })
  }

  _msgHandler(callback, resolve, reject, msg) {
    if (!msg) {
      reject('invalid msg')
      return
    }
    if (msg.type === 'BUSY') {
      reject('worker is busy')
    } else if (msg.type === 'ERROR') {
      reject(msg.error)
    } else {
      return callback(resolve, reject, msg)
    }
  }

  _workerMsg(msg) {
    const callback = this._callbacks[this._callbacks.length - 1]
    const next = callback(msg)
    if (!next) {
      this._callbacks.pop()
    }
  }

}
module.exports = Archive
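Note: a sketch of how this wrapper is consumed for listing entries and cleanup (the archive path is illustrative):

    const fs = require('fs')
    const Archive = require('./server/libs/libarchive/archive')

    async function listArchiveEntries(archivePath) {
      const buffer = fs.readFileSync(archivePath)
      const archive = await Archive.open(buffer) // spawns the worker and opens the buffer
      try {
        const files = await archive.getFilesArray() // [{ file: CompressedFile, path }]
        return files.map((f) => `${f.path}${f.file.name || f.file}`)
      } finally {
        archive.close() // terminate the worker thread to free memory
      }
    }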

server/libs/libarchive/libarchiveWorker.js (new file)
@@ -0,0 +1,72 @@
/**
 * Modified from https://github.com/nika-begiashvili/libarchivejs
 */

const { parentPort } = require('worker_threads')
const { getArchiveReader } = require('./wasm-module')

let reader = null
let busy = false

getArchiveReader((_reader) => {
  reader = _reader
  busy = false
  parentPort.postMessage({ type: 'READY' })
})

parentPort.on('message', async msg => {
  if (busy) {
    parentPort.postMessage({ type: 'BUSY' })
    return
  }

  let skipExtraction = false
  busy = true
  try {
    switch (msg.type) {
      case 'HELLO': // module will respond READY when it's ready
        break
      case 'OPEN':
        await reader.open(msg.file)
        parentPort.postMessage({ type: 'OPENED' })
        break
      case 'LIST_FILES':
        skipExtraction = true
      // eslint-disable-next-line no-fallthrough
      case 'EXTRACT_FILES':
        for (const entry of reader.entries(skipExtraction)) {
          parentPort.postMessage({ type: 'ENTRY', entry })
        }
        parentPort.postMessage({ type: 'END' })
        break
      case 'EXTRACT_SINGLE_FILE':
        for (const entry of reader.entries(true, msg.target)) {
          if (entry.fileData) {
            parentPort.postMessage({ type: 'FILE', entry })
          }
        }
        break
      case 'CHECK_ENCRYPTION':
        parentPort.postMessage({ type: 'ENCRYPTION_STATUS', status: reader.hasEncryptedData() })
        break
      case 'SET_PASSPHRASE':
        reader.setPassphrase(msg.passphrase)
        parentPort.postMessage({ type: 'PASSPHRASE_STATUS', status: true })
        break
      default:
        throw new Error('Invalid Command')
    }
  } catch (err) {
    parentPort.postMessage({
      type: 'ERROR',
      error: {
        message: err.message,
        name: err.name,
        stack: err.stack
      }
    })
  } finally {
    // eslint-disable-next-line require-atomic-updates
    busy = false
  }
})
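Note: the parent/worker exchange is strictly one request at a time (the BUSY guard), and multi-part replies (ENTRY ... END) keep the parent-side handler alive by returning true from the callback. The matching logic on the parent, reduced to a sketch:

    // Sketch of the callback-stack protocol used by Archive._postMessage/_workerMsg.
    const pending = []
    function request(worker, msg, handler) {
      worker.postMessage(msg)
      return new Promise((resolve, reject) => {
        pending.push((reply) => handler(resolve, reject, reply)) // truthy return keeps it active
      })
    }
    function onMessage(reply) {
      const handler = pending[pending.length - 1]
      if (!handler(reply)) pending.pop() // single-shot handlers retire after one reply
    }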

server/libs/libarchive/wasm-libarchive.js (new file, 18 lines)
File diff suppressed because one or more lines are too long

server/libs/libarchive/wasm-module.js (new file)
@@ -0,0 +1,235 @@
/**
 * Modified from https://github.com/nika-begiashvili/libarchivejs
 */

const Path = require('path')
const libarchive = require('./wasm-libarchive')

const TYPE_MAP = {
  32768: 'FILE',
  16384: 'DIR',
  40960: 'SYMBOLIC_LINK',
  49152: 'SOCKET',
  8192: 'CHARACTER_DEVICE',
  24576: 'BLOCK_DEVICE',
  4096: 'NAMED_PIPE',
}

class ArchiveReader {
  /**
   * archive reader
   * @param {WasmModule} wasmModule emscripten module
   */
  constructor(wasmModule) {
    this._wasmModule = wasmModule
    this._runCode = wasmModule.runCode
    this._file = null
    this._passphrase = null
  }

  /**
   * open archive, needs to closed manually
   * @param {File} file
   */
  open(file) {
    if (this._file !== null) {
      console.warn('Closing previous file')
      this.close()
    }
    const { promise, resolve, reject } = this._promiseHandles()
    this._file = file
    this._loadFile(file, resolve, reject)
    return promise
  }

  /**
   * close archive
   */
  close() {
    this._runCode.closeArchive(this._archive)
    this._wasmModule._free(this._filePtr)
    this._file = null
    this._filePtr = null
    this._archive = null
  }

  /**
   * detect if archive has encrypted data
   * @returns {boolean|null} null if could not be determined
   */
  hasEncryptedData() {
    this._archive = this._runCode.openArchive(this._filePtr, this._fileLength, this._passphrase)
    this._runCode.getNextEntry(this._archive)
    const status = this._runCode.hasEncryptedEntries(this._archive)
    if (status === 0) {
      return false
    } else if (status > 0) {
      return true
    } else {
      return null
    }
  }

  /**
   * set passphrase to be used with archive
   * @param {*} passphrase
   */
  setPassphrase(passphrase) {
    this._passphrase = passphrase
  }

  /**
   * get archive entries
   * @param {boolean} skipExtraction
   * @param {string} except don't skip this entry
   */
  *entries(skipExtraction = false, except = null) {
    this._archive = this._runCode.openArchive(this._filePtr, this._fileLength, this._passphrase)
    let entry
    while (true) {
      entry = this._runCode.getNextEntry(this._archive)
      if (entry === 0) break

      const entryData = {
        size: this._runCode.getEntrySize(entry),
        path: this._runCode.getEntryName(entry),
        type: TYPE_MAP[this._runCode.getEntryType(entry)],
        ref: entry,
      }

      if (entryData.type === 'FILE') {
        let fileName = entryData.path.split('/')
        entryData.fileName = fileName[fileName.length - 1]
      }

      if (skipExtraction && except !== entryData.path) {
        this._runCode.skipEntry(this._archive)
      } else {
        const ptr = this._runCode.getFileData(this._archive, entryData.size)
        if (ptr < 0) {
          throw new Error(this._runCode.getError(this._archive))
        }
        entryData.fileData = this._wasmModule.HEAP8.slice(ptr, ptr + entryData.size)
        this._wasmModule._free(ptr)
      }
      yield entryData
    }
  }

  _loadFile(fileBuffer, resolve, reject) {
    try {
      const array = new Uint8Array(fileBuffer)
      this._fileLength = array.length
      this._filePtr = this._runCode.malloc(this._fileLength)
      this._wasmModule.HEAP8.set(array, this._filePtr)
      resolve()
    } catch (error) {
      reject(error)
    }
  }

  _promiseHandles() {
    let resolve = null, reject = null
    const promise = new Promise((_resolve, _reject) => {
      resolve = _resolve
      reject = _reject
    })
    return { promise, resolve, reject }
  }

}

class WasmModule {
  constructor() {
    this.preRun = []
    this.postRun = []
    this.totalDependencies = 0
  }

  print(...text) {
    console.log(text)
  }

  printErr(...text) {
    console.error(text)
  }

  initFunctions() {
    this.runCode = {
      // const char * get_version()
      getVersion: this.cwrap('get_version', 'string', []),
      // void * archive_open( const void * buffer, size_t buffer_size)
      // retuns archive pointer
      openArchive: this.cwrap('archive_open', 'number', ['number', 'number', 'string']),
      // void * get_entry(void * archive)
      // return archive entry pointer
      getNextEntry: this.cwrap('get_next_entry', 'number', ['number']),
      // void * get_filedata( void * archive, size_t bufferSize )
      getFileData: this.cwrap('get_filedata', 'number', ['number', 'number']),
      // int archive_read_data_skip(struct archive *_a)
      skipEntry: this.cwrap('archive_read_data_skip', 'number', ['number']),
      // void archive_close( void * archive )
      closeArchive: this.cwrap('archive_close', null, ['number']),
      // la_int64_t archive_entry_size( struct archive_entry * )
      getEntrySize: this.cwrap('archive_entry_size', 'number', ['number']),
      // const char * archive_entry_pathname( struct archive_entry * )
      getEntryName: this.cwrap('archive_entry_pathname', 'string', ['number']),
      // __LA_MODE_T archive_entry_filetype( struct archive_entry * )
      /*
      #define AE_IFMT   ((__LA_MODE_T)0170000)
      #define AE_IFREG  ((__LA_MODE_T)0100000) // Regular file
      #define AE_IFLNK  ((__LA_MODE_T)0120000) // Sybolic link
      #define AE_IFSOCK ((__LA_MODE_T)0140000) // Socket
      #define AE_IFCHR  ((__LA_MODE_T)0020000) // Character device
      #define AE_IFBLK  ((__LA_MODE_T)0060000) // Block device
      #define AE_IFDIR  ((__LA_MODE_T)0040000) // Directory
      #define AE_IFIFO  ((__LA_MODE_T)0010000) // Named pipe
      */
      getEntryType: this.cwrap('archive_entry_filetype', 'number', ['number']),
      // const char * archive_error_string(struct archive *);
      getError: this.cwrap('archive_error_string', 'string', ['number']),

      /*
       * Returns 1 if the archive contains at least one encrypted entry.
       * If the archive format not support encryption at all
       * ARCHIVE_READ_FORMAT_ENCRYPTION_UNSUPPORTED is returned.
       * If for any other reason (e.g. not enough data read so far)
       * we cannot say whether there are encrypted entries, then
       * ARCHIVE_READ_FORMAT_ENCRYPTION_DONT_KNOW is returned.
       * In general, this function will return values below zero when the
       * reader is uncertain or totally incapable of encryption support.
       * When this function returns 0 you can be sure that the reader
       * supports encryption detection but no encrypted entries have
       * been found yet.
       *
       * NOTE: If the metadata/header of an archive is also encrypted, you
       * cannot rely on the number of encrypted entries. That is why this
       * function does not return the number of encrypted entries but#
       * just shows that there are some.
       */
      // __LA_DECL int archive_read_has_encrypted_entries(struct archive *);
      entryIsEncrypted: this.cwrap('archive_entry_is_encrypted', 'number', ['number']),
      hasEncryptedEntries: this.cwrap('archive_read_has_encrypted_entries', 'number', ['number']),
      // __LA_DECL int archive_read_add_passphrase(struct archive *, const char *);
      addPassphrase: this.cwrap('archive_read_add_passphrase', 'number', ['number', 'string']),
      //this.stringToUTF(str), //
      string: (str) => this.allocate(this.intArrayFromString(str), 'i8', 0),
      malloc: this.cwrap('malloc', 'number', ['number']),
      free: this.cwrap('free', null, ['number']),
    }
  }

  monitorRunDependencies() { }

  locateFile(path /* ,prefix */) {
    const wasmFilepath = Path.join(__dirname, `../../../client/dist/libarchive/wasm-gen/${path}`)
    return wasmFilepath
  }
}

module.exports.getArchiveReader = (cb) => {
  libarchive(new WasmModule()).then((module) => {
    module.initFunctions()
    cb(new ArchiveReader(module))
  })
}
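Note: every entry in runCode is an Emscripten cwrap binding: it wraps an exported C symbol in a plain JS function with the declared return/argument marshalling. The general shape, for reference (the `add` export is hypothetical):

    // cwrap(name, returnType, argTypes) -> JS function
    // e.g. for a C export `int add(int, int)` compiled into the wasm module:
    const add = wasmModule.cwrap('add', 'number', ['number', 'number'])
    console.log(add(2, 3)) // 5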

@@ -7,6 +7,8 @@ const imageType = require('../libs/imageType')
 const globals = require('../utils/globals')
 const { downloadImageFile, filePathToPOSIX, checkPathIsFile } = require('../utils/fileUtils')
 const { extractCoverArt } = require('../utils/ffmpegHelpers')
+const parseEbookMetadata = require('../utils/parsers/parseEbookMetadata')
 
+const CacheManager = require('../managers/CacheManager')
 
 class CoverManager {

@@ -234,6 +236,7 @@ class CoverManager {
 
   /**
    * Extract cover art from audio file and save for library item
+   *
    * @param {import('../models/Book').AudioFileObject[]} audioFiles
    * @param {string} libraryItemId
    * @param {string} [libraryItemPath] null for isFile library items

@@ -268,6 +271,44 @@ class CoverManager {
     return null
   }
 
+  /**
+   * Extract cover art from ebook and save for library item
+   *
+   * @param {import('../utils/parsers/parseEbookMetadata').EBookFileScanData} ebookFileScanData
+   * @param {string} libraryItemId
+   * @param {string} [libraryItemPath] null for isFile library items
+   * @returns {Promise<string>} returns cover path
+   */
+  async saveEbookCoverArt(ebookFileScanData, libraryItemId, libraryItemPath) {
+    if (!ebookFileScanData?.ebookCoverPath) return null
+
+    let coverDirPath = null
+    if (global.ServerSettings.storeCoverWithItem && libraryItemPath) {
+      coverDirPath = libraryItemPath
+    } else {
+      coverDirPath = Path.posix.join(global.MetadataPath, 'items', libraryItemId)
+    }
+    await fs.ensureDir(coverDirPath)
+
+    let extname = Path.extname(ebookFileScanData.ebookCoverPath) || '.jpg'
+    if (extname === '.jpeg') extname = '.jpg'
+    const coverFilename = `cover${extname}`
+    const coverFilePath = Path.join(coverDirPath, coverFilename)
+
+    // TODO: Overwrite if exists?
+    const coverAlreadyExists = await fs.pathExists(coverFilePath)
+    if (coverAlreadyExists) {
+      Logger.warn(`[CoverManager] Extract embedded cover art but cover already exists for "${coverFilePath}" - overwriting`)
+    }
+
+    const success = await parseEbookMetadata.extractCoverImage(ebookFileScanData, coverFilePath)
+    if (success) {
+      await CacheManager.purgeCoverCache(libraryItemId)
+      return coverFilePath
+    }
+    return null
+  }
+
   /**
    *
    * @param {string} url
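Note: the extension normalization keeps cached cover filenames stable. Isolated:

    const Path = require('path')

    let extname = Path.extname('embedded-cover.jpeg') || '.jpg' // '.jpeg'
    if (extname === '.jpeg') extname = '.jpg'
    console.log(`cover${extname}`) // "cover.jpg"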

@@ -1,19 +1,34 @@
 const Path = require('path')
 const fs = require('../libs/fsExtra')
 
-const Logger = require('../Logger')
 const DailyLog = require('../objects/DailyLog')
 
+const Logger = require('../Logger')
+const { LogLevel } = require('../utils/constants')
+
 const TAG = '[LogManager]'
+
+/**
+ * @typedef LogObject
+ * @property {string} timestamp
+ * @property {string} source
+ * @property {string} message
+ * @property {string} levelName
+ * @property {number} level
+ */
 
 class LogManager {
   constructor() {
     this.DailyLogPath = Path.posix.join(global.MetadataPath, 'logs', 'daily')
     this.ScanLogPath = Path.posix.join(global.MetadataPath, 'logs', 'scans')
 
+    /** @type {DailyLog} */
     this.currentDailyLog = null
 
+    /** @type {LogObject[]} */
    this.dailyLogBuffer = []
 
+    /** @type {string[]} */
     this.dailyLogFiles = []
   }

@@ -26,12 +41,12 @@ class LogManager {
     await fs.ensureDir(this.ScanLogPath)
   }
 
-  async ensureScanLogDir() {
-    if (!(await fs.pathExists(this.ScanLogPath))) {
-      await fs.mkdir(this.ScanLogPath)
-    }
-  }
-
+  /**
+   * 1. Ensure log directories exist
+   * 2. Load daily log files
+   * 3. Remove old daily log files
+   * 4. Create/set current daily log file
+   */
   async init() {
     await this.ensureLogDirs()

@@ -46,11 +61,11 @@ class LogManager {
       }
     }
 
+    // set current daily log file or create if does not exist
     const currentDailyLogFilename = DailyLog.getCurrentDailyLogFilename()
     Logger.info(TAG, `Init current daily log filename: ${currentDailyLogFilename}`)
 
-    this.currentDailyLog = new DailyLog()
-    this.currentDailyLog.setData({ dailyLogDirPath: this.DailyLogPath })
+    this.currentDailyLog = new DailyLog(this.DailyLogPath)
 
     if (this.dailyLogFiles.includes(currentDailyLogFilename)) {
       Logger.debug(TAG, `Daily log file already exists - set in Logger`)

@@ -59,7 +74,7 @@ class LogManager {
       this.dailyLogFiles.push(this.currentDailyLog.filename)
     }
 
-    // Log buffered Logs
+    // Log buffered daily logs
     if (this.dailyLogBuffer.length) {
      this.dailyLogBuffer.forEach((logObj) => {
        this.currentDailyLog.appendLog(logObj)

@@ -68,9 +83,12 @@ class LogManager {
     }
   }
 
+  /**
+   * Load all daily log filenames in /metadata/logs/daily
+   */
   async scanLogFiles() {
     const dailyFiles = await fs.readdir(this.DailyLogPath)
-    if (dailyFiles && dailyFiles.length) {
+    if (dailyFiles?.length) {
       dailyFiles.forEach((logFile) => {
         if (Path.extname(logFile) === '.txt') {
           Logger.debug('Daily Log file found', logFile)

@@ -83,30 +101,38 @@ class LogManager {
     this.dailyLogFiles.sort()
   }
 
-  async removeOldestLog() {
-    if (!this.dailyLogFiles.length) return
-    const oldestLog = this.dailyLogFiles[0]
-    return this.removeLogFile(oldestLog)
-  }
-
+  /**
+   *
+   * @param {string} filename
+   */
   async removeLogFile(filename) {
     const fullPath = Path.join(this.DailyLogPath, filename)
     const exists = await fs.pathExists(fullPath)
     if (!exists) {
       Logger.error(TAG, 'Invalid log dne ' + fullPath)
-      this.dailyLogFiles = this.dailyLogFiles.filter(dlf => dlf.filename !== filename)
+      this.dailyLogFiles = this.dailyLogFiles.filter(dlf => dlf !== filename)
     } else {
       try {
         await fs.unlink(fullPath)
         Logger.info(TAG, 'Removed daily log: ' + filename)
-        this.dailyLogFiles = this.dailyLogFiles.filter(dlf => dlf.filename !== filename)
+        this.dailyLogFiles = this.dailyLogFiles.filter(dlf => dlf !== filename)
       } catch (error) {
         Logger.error(TAG, 'Failed to unlink log file ' + fullPath)
       }
     }
   }
 
-  logToFile(logObj) {
+  /**
+   *
+   * @param {LogObject} logObj
+   */
+  async logToFile(logObj) {
+    // Fatal crashes get logged to a separate file
+    if (logObj.level === LogLevel.FATAL) {
+      await this.logCrashToFile(logObj)
+    }
+
+    // Buffer when logging before daily logs have been initialized
     if (!this.currentDailyLog) {
       this.dailyLogBuffer.push(logObj)
       return

@@ -114,25 +140,39 @@ class LogManager {
 
     // Check log rolls to next day
     if (this.currentDailyLog.id !== DailyLog.getCurrentDateString()) {
-      const newDailyLog = new DailyLog()
-      newDailyLog.setData({ dailyLogDirPath: this.DailyLogPath })
-      this.currentDailyLog = newDailyLog
+      this.currentDailyLog = new DailyLog(this.DailyLogPath)
       if (this.dailyLogFiles.length > this.loggerDailyLogsToKeep) {
-        this.removeOldestLog()
+        // Remove oldest log
+        this.removeLogFile(this.dailyLogFiles[0])
       }
     }
 
     // Append log line to log file
-    this.currentDailyLog.appendLog(logObj)
+    return this.currentDailyLog.appendLog(logObj)
   }
 
-  socketRequestDailyLogs(socket) {
-    if (!this.currentDailyLog) {
-      return
-    }
+  /**
+   *
+   * @param {LogObject} logObj
+   */
+  async logCrashToFile(logObj) {
+    const line = JSON.stringify(logObj) + '\n'
 
-    const lastLogs = this.currentDailyLog.logs.slice(-5000)
-    socket.emit('daily_logs', lastLogs)
+    const logsDir = Path.join(global.MetadataPath, 'logs')
+    await fs.ensureDir(logsDir)
+    const crashLogPath = Path.join(logsDir, 'crash_logs.txt')
+    return fs.writeFile(crashLogPath, line, { flag: "a+" }).catch((error) => {
+      console.log('[LogManager] Appended crash log', error)
+    })
+  }
+
+  /**
+   * Most recent 5000 daily logs
+   *
+   * @returns {string}
+   */
+  getMostRecentCurrentDailyLogs() {
+    return this.currentDailyLog?.logs.slice(-5000) || ''
+  }
 }
 module.exports = LogManager
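Note: crash entries are appended one JSON object per line (JSON Lines), so crash_logs.txt can be tailed and each line parsed independently. Reading them back is a sketch like:

    const fs = require('fs/promises')

    async function readCrashLogs(path) { // e.g. <MetadataPath>/logs/crash_logs.txt
      const raw = await fs.readFile(path, 'utf8').catch(() => '')
      return raw.split('\n').filter(Boolean).map((line) => JSON.parse(line))
    }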

@@ -18,6 +18,19 @@ const Logger = require('../Logger')
  * @property {string} title
  */
 
+/**
+ * @typedef SeriesExpandedProperties
+ * @property {{sequence:string}} bookSeries
+ *
+ * @typedef {import('./Series') & SeriesExpandedProperties} SeriesExpanded
+ *
+ * @typedef BookExpandedProperties
+ * @property {import('./Author')[]} authors
+ * @property {SeriesExpanded[]} series
+ *
+ * @typedef {Book & BookExpandedProperties} BookExpanded
+ */
+
 /**
  * @typedef AudioFileObject
  * @property {number} index
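Note: the & intersections in these typedefs are the standard JSDoc trick for layering association properties onto a Sequelize model type. Minimal illustration (the ExtraProps shape is made up):

    /**
     * @typedef ExtraProps
     * @property {string[]} tags
     *
     * @typedef {import('./Book') & ExtraProps} BookWithTags
     */

    /** @type {BookWithTags} */
    let bookWithTags // editors that read JSDoc now see Book fields plus .tags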

@@ -54,6 +67,8 @@ class Book extends Model {
     /** @type {string} */
     this.titleIgnorePrefix
     /** @type {string} */
+    this.subtitle
+    /** @type {string} */
     this.publishedYear
     /** @type {string} */
     this.publishedDate

server/models/CustomMetadataProvider.js (new file)
@@ -0,0 +1,103 @@
const { DataTypes, Model } = require('sequelize')

/**
 * @typedef ClientCustomMetadataProvider
 * @property {UUIDV4} id
 * @property {string} name
 * @property {string} url
 * @property {string} slug
 */

class CustomMetadataProvider extends Model {
  constructor(values, options) {
    super(values, options)

    /** @type {UUIDV4} */
    this.id
    /** @type {string} */
    this.mediaType
    /** @type {string} */
    this.name
    /** @type {string} */
    this.url
    /** @type {string} */
    this.authHeaderValue
    /** @type {Object} */
    this.extraData
    /** @type {Date} */
    this.createdAt
    /** @type {Date} */
    this.updatedAt
  }

  getSlug() {
    return `custom-${this.id}`
  }

  /**
   * Safe for clients
   * @returns {ClientCustomMetadataProvider}
   */
  toClientJson() {
    return {
      id: this.id,
      name: this.name,
      mediaType: this.mediaType,
      slug: this.getSlug()
    }
  }

  /**
   * Get providers for client by media type
   * Currently only available for "book" media type
   *
   * @param {string} mediaType
   * @returns {Promise<ClientCustomMetadataProvider[]>}
   */
  static async getForClientByMediaType(mediaType) {
    if (mediaType !== 'book') return []
    const customMetadataProviders = await this.findAll({
      where: {
        mediaType
      }
    })
    return customMetadataProviders.map(cmp => cmp.toClientJson())
  }

  /**
   * Check if provider exists by slug
   *
   * @param {string} providerSlug
   * @returns {Promise<boolean>}
   */
  static async checkExistsBySlug(providerSlug) {
    const providerId = providerSlug?.split?.('custom-')[1]
    if (!providerId) return false

    return (await this.count({ where: { id: providerId } })) > 0
  }

  /**
   * Initialize model
   * @param {import('../Database').sequelize} sequelize
   */
  static init(sequelize) {
    super.init({
      id: {
        type: DataTypes.UUID,
        defaultValue: DataTypes.UUIDV4,
        primaryKey: true
      },
      name: DataTypes.STRING,
      mediaType: DataTypes.STRING,
      url: DataTypes.STRING,
      authHeaderValue: DataTypes.STRING,
      extraData: DataTypes.JSON
    }, {
      sequelize,
      modelName: 'customMetadataProvider'
    })
  }
}

module.exports = CustomMetadataProvider
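Note: the slug convention is just "custom-" plus the row UUID, so checkExistsBySlug is string surgery plus a count. The same check as a standalone sketch against any Sequelize model:

    // Mirror of checkExistsBySlug (model is any Sequelize model class).
    async function providerExistsBySlug(model, providerSlug) {
      const providerId = providerSlug?.split?.('custom-')[1] // 'custom-<uuid>' -> '<uuid>'
      if (!providerId) return false
      return (await model.count({ where: { id: providerId } })) > 0
    }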

@@ -233,7 +233,7 @@ class Library extends Model {
     for (let i = 0; i < libraries.length; i++) {
       const library = libraries[i]
       if (library.displayOrder !== i + 1) {
-        Logger.dev(`[Library] Updating display order of library from ${library.displayOrder} to ${i + 1}`)
+        Logger.debug(`[Library] Updating display order of library from ${library.displayOrder} to ${i + 1}`)
         await library.update({ displayOrder: i + 1 }).catch((error) => {
           Logger.error(`[Library] Failed to update library display order to ${i + 1}`, error)
         })

@@ -15,6 +15,13 @@ const Podcast = require('./Podcast')
  * @property {{filename:string, ext:string, path:string, relPath:string, size:number, mtimeMs:number, ctimeMs:number, birthtimeMs:number}} metadata
  */
 
+/**
+ * @typedef LibraryItemExpandedProperties
+ * @property {Book.BookExpanded|Podcast.PodcastExpanded} media
+ *
+ * @typedef {LibraryItem & LibraryItemExpandedProperties} LibraryItemExpanded
+ */
+
 class LibraryItem extends Model {
   constructor(values, options) {
     super(values, options)

@@ -264,7 +271,7 @@ class LibraryItem extends Model {
     for (const existingPodcastEpisode of existingPodcastEpisodes) {
       // Episode was removed
       if (!updatedPodcastEpisodes.some(ep => ep.id === existingPodcastEpisode.id)) {
-        Logger.dev(`[LibraryItem] "${libraryItemExpanded.media.title}" episode "${existingPodcastEpisode.title}" was removed`)
+        Logger.debug(`[LibraryItem] "${libraryItemExpanded.media.title}" episode "${existingPodcastEpisode.title}" was removed`)
         await existingPodcastEpisode.destroy()
         hasUpdates = true
       }

@@ -272,7 +279,7 @@ class LibraryItem extends Model {
     for (const updatedPodcastEpisode of updatedPodcastEpisodes) {
       const existingEpisodeMatch = existingPodcastEpisodes.find(ep => ep.id === updatedPodcastEpisode.id)
       if (!existingEpisodeMatch) {
-        Logger.dev(`[LibraryItem] "${libraryItemExpanded.media.title}" episode "${updatedPodcastEpisode.title}" was added`)
+        Logger.debug(`[LibraryItem] "${libraryItemExpanded.media.title}" episode "${updatedPodcastEpisode.title}" was added`)
         await this.sequelize.models.podcastEpisode.createFromOld(updatedPodcastEpisode)
         hasUpdates = true
       } else {

@@ -283,7 +290,7 @@ class LibraryItem extends Model {
         if (existingValue instanceof Date) existingValue = existingValue.valueOf()
 
         if (!areEquivalent(updatedEpisodeCleaned[key], existingValue, true)) {
-          Logger.dev(`[LibraryItem] "${libraryItemExpanded.media.title}" episode "${existingEpisodeMatch.title}" ${key} was updated from "${existingValue}" to "${updatedEpisodeCleaned[key]}"`)
+          Logger.debug(`[LibraryItem] "${libraryItemExpanded.media.title}" episode "${existingEpisodeMatch.title}" ${key} was updated from "${existingValue}" to "${updatedEpisodeCleaned[key]}"`)
           episodeHasUpdates = true
         }
       }

@@ -304,7 +311,7 @@ class LibraryItem extends Model {
     for (const existingAuthor of existingAuthors) {
       // Author was removed from Book
       if (!updatedAuthors.some(au => au.id === existingAuthor.id)) {
-        Logger.dev(`[LibraryItem] "${libraryItemExpanded.media.title}" author "${existingAuthor.name}" was removed`)
+        Logger.debug(`[LibraryItem] "${libraryItemExpanded.media.title}" author "${existingAuthor.name}" was removed`)
         await this.sequelize.models.bookAuthor.removeByIds(existingAuthor.id, libraryItemExpanded.media.id)
         hasUpdates = true
       }
@ -312,7 +319,7 @@ class LibraryItem extends Model {
|
|||
for (const updatedAuthor of updatedAuthors) {
|
||||
// Author was added
|
||||
if (!existingAuthors.some(au => au.id === updatedAuthor.id)) {
|
||||
Logger.dev(`[LibraryItem] "${libraryItemExpanded.media.title}" author "${updatedAuthor.name}" was added`)
|
||||
Logger.debug(`[LibraryItem] "${libraryItemExpanded.media.title}" author "${updatedAuthor.name}" was added`)
|
||||
await this.sequelize.models.bookAuthor.create({ authorId: updatedAuthor.id, bookId: libraryItemExpanded.media.id })
|
||||
hasUpdates = true
|
||||
}
|
||||
|
|
@ -320,7 +327,7 @@ class LibraryItem extends Model {
|
|||
for (const existingSeries of existingSeriesAll) {
|
||||
// Series was removed
|
||||
if (!updatedSeriesAll.some(se => se.id === existingSeries.id)) {
|
||||
Logger.dev(`[LibraryItem] "${libraryItemExpanded.media.title}" series "${existingSeries.name}" was removed`)
|
||||
Logger.debug(`[LibraryItem] "${libraryItemExpanded.media.title}" series "${existingSeries.name}" was removed`)
|
||||
await this.sequelize.models.bookSeries.removeByIds(existingSeries.id, libraryItemExpanded.media.id)
|
||||
hasUpdates = true
|
||||
}
|
||||
|
|
@ -329,11 +336,11 @@ class LibraryItem extends Model {
|
|||
// Series was added/updated
|
||||
const existingSeriesMatch = existingSeriesAll.find(se => se.id === updatedSeries.id)
|
||||
if (!existingSeriesMatch) {
|
||||
Logger.dev(`[LibraryItem] "${libraryItemExpanded.media.title}" series "${updatedSeries.name}" was added`)
|
||||
Logger.debug(`[LibraryItem] "${libraryItemExpanded.media.title}" series "${updatedSeries.name}" was added`)
|
||||
await this.sequelize.models.bookSeries.create({ seriesId: updatedSeries.id, bookId: libraryItemExpanded.media.id, sequence: updatedSeries.sequence })
|
||||
hasUpdates = true
|
||||
} else if (existingSeriesMatch.bookSeries.sequence !== updatedSeries.sequence) {
|
||||
Logger.dev(`[LibraryItem] "${libraryItemExpanded.media.title}" series "${updatedSeries.name}" sequence was updated from "${existingSeriesMatch.bookSeries.sequence}" to "${updatedSeries.sequence}"`)
|
||||
Logger.debug(`[LibraryItem] "${libraryItemExpanded.media.title}" series "${updatedSeries.name}" sequence was updated from "${existingSeriesMatch.bookSeries.sequence}" to "${updatedSeries.sequence}"`)
|
||||
await existingSeriesMatch.bookSeries.update({ id: updatedSeries.id, sequence: updatedSeries.sequence })
|
||||
hasUpdates = true
|
||||
}
|
||||
|
|
@ -346,7 +353,7 @@ class LibraryItem extends Model {
|
|||
if (existingValue instanceof Date) existingValue = existingValue.valueOf()
|
||||
|
||||
if (!areEquivalent(updatedMedia[key], existingValue, true)) {
|
||||
Logger.dev(`[LibraryItem] "${libraryItemExpanded.media.title}" ${libraryItemExpanded.mediaType}.${key} updated from ${existingValue} to ${updatedMedia[key]}`)
|
||||
Logger.debug(`[LibraryItem] "${libraryItemExpanded.media.title}" ${libraryItemExpanded.mediaType}.${key} updated from ${existingValue} to ${updatedMedia[key]}`)
|
||||
hasMediaUpdates = true
|
||||
}
|
||||
}
|
||||
|
|
@ -363,7 +370,7 @@ class LibraryItem extends Model {
|
|||
if (existingValue instanceof Date) existingValue = existingValue.valueOf()
|
||||
|
||||
if (!areEquivalent(updatedLibraryItem[key], existingValue, true)) {
|
||||
Logger.dev(`[LibraryItem] "${libraryItemExpanded.media.title}" ${key} updated from ${existingValue} to ${updatedLibraryItem[key]}`)
|
||||
Logger.debug(`[LibraryItem] "${libraryItemExpanded.media.title}" ${key} updated from ${existingValue} to ${updatedLibraryItem[key]}`)
|
||||
hasLibraryItemUpdates = true
|
||||
}
|
||||
}
|
||||
|
|
@@ -412,6 +419,55 @@ class LibraryItem extends Model {
})
}

/**
*
* @param {string} libraryItemId
* @returns {Promise<LibraryItemExpanded>}
*/
static async getExpandedById(libraryItemId) {
if (!libraryItemId) return null

const libraryItem = await this.findByPk(libraryItemId)
if (!libraryItem) {
Logger.error(`[LibraryItem] Library item not found with id "${libraryItemId}"`)
return null
}

if (libraryItem.mediaType === 'podcast') {
libraryItem.media = await libraryItem.getMedia({
include: [
{
model: this.sequelize.models.podcastEpisode
}
]
})
} else {
libraryItem.media = await libraryItem.getMedia({
include: [
{
model: this.sequelize.models.author,
through: {
attributes: []
}
},
{
model: this.sequelize.models.series,
through: {
attributes: ['sequence']
}
}
],
order: [
[this.sequelize.models.author, this.sequelize.models.bookAuthor, 'createdAt', 'ASC'],
[this.sequelize.models.series, 'bookSeries', 'createdAt', 'ASC']
]
})
}

if (!libraryItem.media) return null
return libraryItem
}

/**
* Get old library item by id
* @param {string} libraryItemId
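A minimal sketch of how the new getExpandedById helper might be called, for example from a controller. The route handler and response handling here are illustrative assumptions, not part of this diff; only the helper itself and Database.libraryItemModel come from the codebase.

// Hypothetical caller, assuming Database.libraryItemModel is the initialized LibraryItem model
const Database = require('../Database')

async function getLibraryItemExpanded(req, res) {
  // Returns null for an unknown id or when the media row is missing
  const libraryItem = await Database.libraryItemModel.getExpandedById(req.params.id)
  if (!libraryItem) return res.sendStatus(404)
  // libraryItem.media is now populated: podcast episodes, or authors/series for books
  res.json(libraryItem)
}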
@@ -541,7 +597,7 @@ class LibraryItem extends Model {
})
}
}
Logger.dev(`Loaded ${itemsInProgressPayload.items.length} of ${itemsInProgressPayload.count} items for "Continue Listening/Reading" in ${((Date.now() - fullStart) / 1000).toFixed(2)}s`)
Logger.debug(`Loaded ${itemsInProgressPayload.items.length} of ${itemsInProgressPayload.count} items for "Continue Listening/Reading" in ${((Date.now() - fullStart) / 1000).toFixed(2)}s`)

let start = Date.now()
if (library.isBook) {

@@ -558,7 +614,7 @@ class LibraryItem extends Model {
total: continueSeriesPayload.count
})
}
Logger.dev(`Loaded ${continueSeriesPayload.libraryItems.length} of ${continueSeriesPayload.count} items for "Continue Series" in ${((Date.now() - start) / 1000).toFixed(2)}s`)
Logger.debug(`Loaded ${continueSeriesPayload.libraryItems.length} of ${continueSeriesPayload.count} items for "Continue Series" in ${((Date.now() - start) / 1000).toFixed(2)}s`)
} else if (library.isPodcast) {
// "Newest Episodes" shelf
const newestEpisodesPayload = await libraryFilters.getNewestPodcastEpisodes(library, user, limit)

@@ -572,7 +628,7 @@ class LibraryItem extends Model {
total: newestEpisodesPayload.count
})
}
Logger.dev(`Loaded ${newestEpisodesPayload.libraryItems.length} of ${newestEpisodesPayload.count} episodes for "Newest Episodes" in ${((Date.now() - start) / 1000).toFixed(2)}s`)
Logger.debug(`Loaded ${newestEpisodesPayload.libraryItems.length} of ${newestEpisodesPayload.count} episodes for "Newest Episodes" in ${((Date.now() - start) / 1000).toFixed(2)}s`)
}

start = Date.now()

@@ -588,7 +644,7 @@ class LibraryItem extends Model {
total: mostRecentPayload.count
})
}
Logger.dev(`Loaded ${mostRecentPayload.libraryItems.length} of ${mostRecentPayload.count} items for "Recently Added" in ${((Date.now() - start) / 1000).toFixed(2)}s`)
Logger.debug(`Loaded ${mostRecentPayload.libraryItems.length} of ${mostRecentPayload.count} items for "Recently Added" in ${((Date.now() - start) / 1000).toFixed(2)}s`)

if (library.isBook) {
start = Date.now()

@@ -604,7 +660,7 @@ class LibraryItem extends Model {
total: seriesMostRecentPayload.count
})
}
Logger.dev(`Loaded ${seriesMostRecentPayload.series.length} of ${seriesMostRecentPayload.count} series for "Recent Series" in ${((Date.now() - start) / 1000).toFixed(2)}s`)
Logger.debug(`Loaded ${seriesMostRecentPayload.series.length} of ${seriesMostRecentPayload.count} series for "Recent Series" in ${((Date.now() - start) / 1000).toFixed(2)}s`)

start = Date.now()
// "Discover" shelf

@@ -619,7 +675,7 @@ class LibraryItem extends Model {
total: discoverLibraryItemsPayload.count
})
}
Logger.dev(`Loaded ${discoverLibraryItemsPayload.libraryItems.length} of ${discoverLibraryItemsPayload.count} items for "Discover" in ${((Date.now() - start) / 1000).toFixed(2)}s`)
Logger.debug(`Loaded ${discoverLibraryItemsPayload.libraryItems.length} of ${discoverLibraryItemsPayload.count} items for "Discover" in ${((Date.now() - start) / 1000).toFixed(2)}s`)
}

start = Date.now()

@@ -650,7 +706,7 @@ class LibraryItem extends Model {
})
}
}
Logger.dev(`Loaded ${mediaFinishedPayload.items.length} of ${mediaFinishedPayload.count} items for "Listen/Read Again" in ${((Date.now() - start) / 1000).toFixed(2)}s`)
Logger.debug(`Loaded ${mediaFinishedPayload.items.length} of ${mediaFinishedPayload.count} items for "Listen/Read Again" in ${((Date.now() - start) / 1000).toFixed(2)}s`)

if (library.isBook) {
start = Date.now()

@@ -666,7 +722,7 @@ class LibraryItem extends Model {
total: newestAuthorsPayload.count
})
}
Logger.dev(`Loaded ${newestAuthorsPayload.authors.length} of ${newestAuthorsPayload.count} authors for "Newest Authors" in ${((Date.now() - start) / 1000).toFixed(2)}s`)
Logger.debug(`Loaded ${newestAuthorsPayload.authors.length} of ${newestAuthorsPayload.count} authors for "Newest Authors" in ${((Date.now() - start) / 1000).toFixed(2)}s`)
}

Logger.debug(`Loaded ${shelves.length} personalized shelves in ${((Date.now() - fullStart) / 1000).toFixed(2)}s`)
@@ -118,7 +118,9 @@ class PlaybackSession extends Model {

static createFromOld(oldPlaybackSession) {
const playbackSession = this.getFromOld(oldPlaybackSession)
return this.create(playbackSession)
return this.create(playbackSession, {
silent: true
})
}

static updateFromOld(oldPlaybackSession) {

@@ -126,7 +128,8 @@ class PlaybackSession extends Model {
return this.update(playbackSession, {
where: {
id: playbackSession.id
}
},
silent: true
})
}
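The `silent: true` option added to create/update above is standard Sequelize: it skips bumping the `updatedAt` timestamp for the write. A small sketch of the difference, with an illustrative field name:

// `currentTime` is an illustrative attribute; `silent` is the Sequelize option
await playbackSession.update({ currentTime: 1234 })                    // updatedAt changes
await playbackSession.update({ currentTime: 1234 }, { silent: true }) // updatedAt preserved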
@@ -1,5 +1,12 @@
const { DataTypes, Model } = require('sequelize')

/**
* @typedef PodcastExpandedProperties
* @property {import('./PodcastEpisode')[]} podcastEpisodes
*
* @typedef {Podcast & PodcastExpandedProperties} PodcastExpanded
*/

class Podcast extends Model {
constructor(values, options) {
super(values, options)
@@ -1,23 +1,28 @@
const Path = require('path')
const date = require('../libs/dateAndTime')
const fs = require('../libs/fsExtra')
const { readTextFile } = require('../utils/fileUtils')
const fileUtils = require('../utils/fileUtils')
const Logger = require('../Logger')

class DailyLog {
constructor() {
this.id = null
this.datePretty = null
/**
*
* @param {string} dailyLogDirPath Path to daily logs /metadata/logs/daily
*/
constructor(dailyLogDirPath) {
this.id = date.format(new Date(), 'YYYY-MM-DD')

this.dailyLogDirPath = null
this.filename = null
this.path = null
this.fullPath = null
this.dailyLogDirPath = dailyLogDirPath
this.filename = this.id + '.txt'
this.fullPath = Path.join(this.dailyLogDirPath, this.filename)

this.createdAt = null
this.createdAt = Date.now()

/** @type {import('../managers/LogManager').LogObject[]} */
this.logs = []
/** @type {string[]} */
this.bufferedLogLines = []

this.locked = false
}

@@ -32,8 +37,6 @@ class DailyLog {
toJSON() {
return {
id: this.id,
datePretty: this.datePretty,
path: this.path,
dailyLogDirPath: this.dailyLogDirPath,
fullPath: this.fullPath,
filename: this.filename,

@@ -41,36 +44,34 @@ class DailyLog {
}
}

setData(data) {
this.id = date.format(new Date(), 'YYYY-MM-DD')
this.datePretty = date.format(new Date(), 'ddd, MMM D YYYY')

this.dailyLogDirPath = data.dailyLogDirPath

this.filename = this.id + '.txt'
this.path = Path.join('backups', this.filename)
this.fullPath = Path.join(this.dailyLogDirPath, this.filename)

this.createdAt = Date.now()
}

async appendBufferedLogs() {
var buffered = [...this.bufferedLogLines]
/**
* Append all buffered lines to daily log file
*/
appendBufferedLogs() {
let buffered = [...this.bufferedLogLines]
this.bufferedLogLines = []

var oneBigLog = ''
let oneBigLog = ''
buffered.forEach((logLine) => {
oneBigLog += logLine
})
this.appendLogLine(oneBigLog)
return this.appendLogLine(oneBigLog)
}

async appendLog(logObj) {
/**
*
* @param {import('../managers/LogManager').LogObject} logObj
*/
appendLog(logObj) {
this.logs.push(logObj)
var line = JSON.stringify(logObj) + '\n'
this.appendLogLine(line)
return this.appendLogLine(JSON.stringify(logObj) + '\n')
}

/**
* Append log to daily log file
*
* @param {string} line
*/
async appendLogLine(line) {
if (this.locked) {
this.bufferedLogLines.push(line)

@@ -84,24 +85,29 @@ class DailyLog {

this.locked = false
if (this.bufferedLogLines.length) {
this.appendBufferedLogs()
await this.appendBufferedLogs()
}
}

/**
* Load all logs from file
* Parses lines and re-saves the file if bad lines are removed
*/
async loadLogs() {
var exists = await fs.pathExists(this.fullPath)
if (!exists) {
if (!await fs.pathExists(this.fullPath)) {
console.error('Daily log does not exist')
return
}

var text = await readTextFile(this.fullPath)
const text = await fileUtils.readTextFile(this.fullPath)

var hasFailures = false
let hasFailures = false

var logLines = text.split(/\r?\n/)
let logLines = text.split(/\r?\n/)
// remove last log if empty
if (logLines.length && !logLines[logLines.length - 1]) logLines = logLines.slice(0, -1)

// JSON parse log lines
this.logs = logLines.map(t => {
if (!t) {
hasFailures = true

@@ -118,7 +124,7 @@ class DailyLog {

// Rewrite log file to remove errors
if (hasFailures) {
var newLogLines = this.logs.map(l => JSON.stringify(l)).join('\n') + '\n'
const newLogLines = this.logs.map(l => JSON.stringify(l)).join('\n') + '\n'
await fs.writeFile(this.fullPath, newLogLines)
console.log('Re-Saved log file to remove bad lines')
}
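The DailyLog changes above keep the lock-and-buffer pattern: while a file write is in flight (`locked`), incoming lines are queued in `bufferedLogLines` and flushed once the write completes. A standalone sketch of the pattern, under the assumption of a plain append-only file; this is not the class itself:

// Minimal lock-and-buffer appender, assuming fs/promises and a writable filePath
const fs = require('fs/promises')

class AppendBuffer {
  constructor(filePath) {
    this.filePath = filePath
    this.locked = false
    this.buffered = []
  }

  async append(line) {
    if (this.locked) {
      // A write is in flight; queue the line instead of interleaving writes
      this.buffered.push(line)
      return
    }
    this.locked = true
    await fs.appendFile(this.filePath, line)
    this.locked = false
    if (this.buffered.length) {
      // Flush everything that queued up while we were writing, as one chunk
      const lines = this.buffered.splice(0).join('')
      await this.append(lines)
    }
  }
}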
@@ -84,6 +84,24 @@ class Feed {
return episode.fullPath
}

/**
* If chapters for an audiobook match the audio tracks then use chapter titles instead of audio file names
*
* @param {import('../objects/LibraryItem')} libraryItem
* @returns {boolean}
*/
checkUseChapterTitlesForEpisodes(libraryItem) {
const tracks = libraryItem.media.tracks
const chapters = libraryItem.media.chapters
if (tracks.length !== chapters.length) return false
for (let i = 0; i < tracks.length; i++) {
if (Math.abs(chapters[i].start - tracks[i].startOffset) >= 1) {
return false
}
}
return true
}

setFromItem(userId, slug, libraryItem, serverAddress, preventIndexing = true, ownerName = null, ownerEmail = null) {
const media = libraryItem.media
const mediaMetadata = media.metadata

@@ -128,9 +146,10 @@ class Feed {
this.episodes.push(feedEpisode)
})
} else { // AUDIOBOOK EPISODES
const useChapterTitles = this.checkUseChapterTitlesForEpisodes(libraryItem)
media.tracks.forEach((audioTrack) => {
const feedEpisode = new FeedEpisode()
feedEpisode.setFromAudiobookTrack(libraryItem, serverAddress, slug, audioTrack, this.meta)
feedEpisode.setFromAudiobookTrack(libraryItem, serverAddress, slug, audioTrack, this.meta, useChapterTitles)
this.episodes.push(feedEpisode)
})
}

@@ -168,9 +187,10 @@ class Feed {
this.episodes.push(feedEpisode)
})
} else { // AUDIOBOOK EPISODES
const useChapterTitles = this.checkUseChapterTitlesForEpisodes(libraryItem)
media.tracks.forEach((audioTrack) => {
const feedEpisode = new FeedEpisode()
feedEpisode.setFromAudiobookTrack(libraryItem, this.serverAddress, this.slug, audioTrack, this.meta)
feedEpisode.setFromAudiobookTrack(libraryItem, this.serverAddress, this.slug, audioTrack, this.meta, useChapterTitles)
this.episodes.push(feedEpisode)
})
}

@@ -214,9 +234,10 @@ class Feed {
itemsWithTracks.forEach((item, index) => {
if (item.updatedAt > this.entityUpdatedAt) this.entityUpdatedAt = item.updatedAt

const useChapterTitles = this.checkUseChapterTitlesForEpisodes(item)
item.media.tracks.forEach((audioTrack) => {
const feedEpisode = new FeedEpisode()
feedEpisode.setFromAudiobookTrack(item, serverAddress, slug, audioTrack, this.meta, index)
feedEpisode.setFromAudiobookTrack(item, serverAddress, slug, audioTrack, this.meta, useChapterTitles, index)
this.episodes.push(feedEpisode)
})
})

@@ -245,9 +266,10 @@ class Feed {
itemsWithTracks.forEach((item, index) => {
if (item.updatedAt > this.entityUpdatedAt) this.entityUpdatedAt = item.updatedAt

const useChapterTitles = this.checkUseChapterTitlesForEpisodes(item)
item.media.tracks.forEach((audioTrack) => {
const feedEpisode = new FeedEpisode()
feedEpisode.setFromAudiobookTrack(item, this.serverAddress, this.slug, audioTrack, this.meta, index)
feedEpisode.setFromAudiobookTrack(item, this.serverAddress, this.slug, audioTrack, this.meta, useChapterTitles, index)
this.episodes.push(feedEpisode)
})
})

@@ -295,9 +317,10 @@ class Feed {
itemsWithTracks.forEach((item, index) => {
if (item.updatedAt > this.entityUpdatedAt) this.entityUpdatedAt = item.updatedAt

const useChapterTitles = this.checkUseChapterTitlesForEpisodes(item)
item.media.tracks.forEach((audioTrack) => {
const feedEpisode = new FeedEpisode()
feedEpisode.setFromAudiobookTrack(item, serverAddress, slug, audioTrack, this.meta, index)
feedEpisode.setFromAudiobookTrack(item, serverAddress, slug, audioTrack, this.meta, useChapterTitles, index)
this.episodes.push(feedEpisode)
})
})

@@ -329,9 +352,10 @@ class Feed {
itemsWithTracks.forEach((item, index) => {
if (item.updatedAt > this.entityUpdatedAt) this.entityUpdatedAt = item.updatedAt

const useChapterTitles = this.checkUseChapterTitlesForEpisodes(item)
item.media.tracks.forEach((audioTrack) => {
const feedEpisode = new FeedEpisode()
feedEpisode.setFromAudiobookTrack(item, this.serverAddress, this.slug, audioTrack, this.meta, index)
feedEpisode.setFromAudiobookTrack(item, this.serverAddress, this.slug, audioTrack, this.meta, useChapterTitles, index)
this.episodes.push(feedEpisode)
})
})
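The new checkUseChapterTitlesForEpisodes only trusts chapter titles when every chapter start lines up with the corresponding track's start offset to within one second. A standalone restatement of that test with sample data (the helper name and data here are illustrative, not from the diff):

// Equivalent predicate: >= 1 second of drift anywhere means "don't use chapter titles"
function chaptersMatchTracks(tracks, chapters) {
  if (tracks.length !== chapters.length) return false
  return tracks.every((track, i) => Math.abs(chapters[i].start - track.startOffset) < 1)
}

const tracks = [{ startOffset: 0 }, { startOffset: 1800.2 }]
const chapters = [{ start: 0 }, { start: 1800 }]
console.log(chaptersMatchTracks(tracks, chapters)) // true => chapter titles are used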
@@ -97,7 +97,17 @@ class FeedEpisode {
this.fullPath = episode.audioFile.metadata.path
}

setFromAudiobookTrack(libraryItem, serverAddress, slug, audioTrack, meta, additionalOffset = null) {
/**
*
* @param {import('../objects/LibraryItem')} libraryItem
* @param {string} serverAddress
* @param {string} slug
* @param {import('../objects/files/AudioTrack')} audioTrack
* @param {Object} meta
* @param {boolean} useChapterTitles
* @param {number} [additionalOffset]
*/
setFromAudiobookTrack(libraryItem, serverAddress, slug, audioTrack, meta, useChapterTitles, additionalOffset = null) {
// Example: <pubDate>Fri, 04 Feb 2015 00:00:00 GMT</pubDate>
let timeOffset = isNaN(audioTrack.index) ? 0 : (Number(audioTrack.index) * 1000) // Offset pubdate to ensure correct order
let episodeId = uuidv4()

@@ -119,10 +129,10 @@ class FeedEpisode {
if (libraryItem.media.tracks.length == 1) { // If audiobook is a single file, use book title instead of chapter/file title
title = libraryItem.media.metadata.title
} else {
if (libraryItem.media.chapters.length) {
if (useChapterTitles) {
// If audio track start and chapter start are within 1 second of each other then use the chapter title
var matchingChapter = libraryItem.media.chapters.find(ch => Math.abs(ch.start - audioTrack.startOffset) < 1)
if (matchingChapter && matchingChapter.title) title = matchingChapter.title
const matchingChapter = libraryItem.media.chapters.find(ch => Math.abs(ch.start - audioTrack.startOffset) < 1)
if (matchingChapter?.title) title = matchingChapter.title
}
}
@@ -55,7 +55,7 @@ class ServerSettings {
this.buildNumber = packageJson.buildNumber

// Auth settings
// Active auth methods
this.authLoginCustomMessage = null
this.authActiveAuthMethods = ['local']

// openid settings

@@ -113,6 +113,7 @@ class ServerSettings {
this.version = settings.version || null
this.buildNumber = settings.buildNumber || 0 // Added v2.4.5

this.authLoginCustomMessage = settings.authLoginCustomMessage || null // Added v2.7.3
this.authActiveAuthMethods = settings.authActiveAuthMethods || ['local']

this.authOpenIDIssuerURL = settings.authOpenIDIssuerURL || null

@@ -201,6 +202,7 @@ class ServerSettings {
logLevel: this.logLevel,
version: this.version,
buildNumber: this.buildNumber,
authLoginCustomMessage: this.authLoginCustomMessage,
authActiveAuthMethods: this.authActiveAuthMethods,
authOpenIDIssuerURL: this.authOpenIDIssuerURL,
authOpenIDAuthorizationURL: this.authOpenIDAuthorizationURL,

@@ -213,7 +215,7 @@ class ServerSettings {
authOpenIDButtonText: this.authOpenIDButtonText,
authOpenIDAutoLaunch: this.authOpenIDAutoLaunch,
authOpenIDAutoRegister: this.authOpenIDAutoRegister,
authOpenIDMatchExistingBy: this.authOpenIDMatchExistingBy,
authOpenIDMatchExistingBy: this.authOpenIDMatchExistingBy,
authOpenIDMobileRedirectURIs: this.authOpenIDMobileRedirectURIs // Do not return to client
}
}

@@ -246,6 +248,7 @@ class ServerSettings {

get authenticationSettings() {
return {
authLoginCustomMessage: this.authLoginCustomMessage,
authActiveAuthMethods: this.authActiveAuthMethods,
authOpenIDIssuerURL: this.authOpenIDIssuerURL,
authOpenIDAuthorizationURL: this.authOpenIDAuthorizationURL,

@@ -264,7 +267,9 @@ class ServerSettings {
}

get authFormData() {
const clientFormData = {}
const clientFormData = {
authLoginCustomMessage: this.authLoginCustomMessage
}
if (this.authActiveAuthMethods.includes('openid')) {
clientFormData.authOpenIDButtonText = this.authOpenIDButtonText
clientFormData.authOpenIDAutoLaunch = this.authOpenIDAutoLaunch
@@ -14,7 +14,7 @@ class AudiobookCovers {
Logger.error('[AudiobookCovers] Cover search error', error)
return []
})
return items.map(item => ({ cover: item.filename }))
return items.map(item => ({ cover: item.versions.png.original }))
}
}
server/providers/CustomProviderAdapter.js (new file, 93 lines)

@@ -0,0 +1,93 @@
const Database = require('../Database')
const axios = require('axios')
const Logger = require('../Logger')

class CustomProviderAdapter {
constructor() { }

/**
*
* @param {string} title
* @param {string} author
* @param {string} providerSlug
* @param {string} mediaType
* @returns {Promise<Object[]>}
*/
async search(title, author, providerSlug, mediaType) {
const providerId = providerSlug.split('custom-')[1]
const provider = await Database.customMetadataProviderModel.findByPk(providerId)

if (!provider) {
throw new Error("Custom provider not found for the given id")
}

// Setup query params
const queryObj = {
mediaType,
query: title
}
if (author) {
queryObj.author = author
}
const queryString = (new URLSearchParams(queryObj)).toString()

// Setup headers
const axiosOptions = {}
if (provider.authHeaderValue) {
axiosOptions.headers = {
'Authorization': provider.authHeaderValue
}
}

const matches = await axios.get(`${provider.url}/search?${queryString}`, axiosOptions).then((res) => {
if (!res?.data || !Array.isArray(res.data.matches)) return null
return res.data.matches
}).catch(error => {
Logger.error('[CustomMetadataProvider] Search error', error)
return []
})

if (!matches) {
throw new Error("Custom provider returned malformed response")
}

// re-map keys to throw out
return matches.map(({
title,
subtitle,
author,
narrator,
publisher,
publishedYear,
description,
cover,
isbn,
asin,
genres,
tags,
series,
language,
duration
}) => {
return {
title,
subtitle,
author,
narrator,
publisher,
publishedYear,
description,
cover,
isbn,
asin,
genres,
tags: tags?.join(',') || null,
series: series?.length ? series : null,
language,
duration
}
})
}
}

module.exports = CustomProviderAdapter
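The adapter expects a provider's /search endpoint to answer `{ matches: [...] }` with the keys it destructures above. A hedged sketch of a conforming provider, assuming Express; the port, the series item shape, and the response values are illustrative assumptions, and the Authorization header check is omitted for brevity:

// Hypothetical custom metadata provider
const express = require('express')
const app = express()

app.get('/search', (req, res) => {
  const { query, author, mediaType } = req.query
  res.json({
    matches: [
      {
        title: query,
        author: author || 'Unknown',
        description: `Example ${mediaType} result`,
        tags: ['example']
      }
    ]
  })
})

app.listen(3001)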
@@ -28,6 +28,7 @@ const SearchController = require('../controllers/SearchController')
const CacheController = require('../controllers/CacheController')
const ToolsController = require('../controllers/ToolsController')
const RSSFeedController = require('../controllers/RSSFeedController')
const CustomMetadataProviderController = require('../controllers/CustomMetadataProviderController')
const MiscController = require('../controllers/MiscController')

const Author = require('../objects/entities/Author')

@@ -299,6 +300,14 @@ class ApiRouter {
this.router.post('/feeds/series/:seriesId/open', RSSFeedController.middleware.bind(this), RSSFeedController.openRSSFeedForSeries.bind(this))
this.router.post('/feeds/:id/close', RSSFeedController.middleware.bind(this), RSSFeedController.closeRSSFeed.bind(this))

//
// Custom Metadata Provider routes
//
this.router.get('/custom-metadata-providers', CustomMetadataProviderController.middleware.bind(this), CustomMetadataProviderController.getAll.bind(this))
this.router.post('/custom-metadata-providers', CustomMetadataProviderController.middleware.bind(this), CustomMetadataProviderController.create.bind(this))
this.router.delete('/custom-metadata-providers/:id', CustomMetadataProviderController.middleware.bind(this), CustomMetadataProviderController.delete.bind(this))


//
// Misc Routes
//

@@ -318,6 +327,7 @@ class ApiRouter {
this.router.patch('/auth-settings', MiscController.updateAuthSettings.bind(this))
this.router.post('/watcher/update', MiscController.updateWatchedPath.bind(this))
this.router.get('/stats/year/:year', MiscController.getAdminStatsForYear.bind(this))
this.router.get('/logger-data', MiscController.getLoggerData.bind(this))
}

//
@@ -36,6 +36,8 @@ class AbsMetadataFileScanner {
for (const key in abMetadata) {
// TODO: When to override with null or empty arrays?
if (abMetadata[key] === undefined || abMetadata[key] === null) continue
if (key === 'authors' && !abMetadata.authors?.length) continue
if (key === 'genres' && !abMetadata.genres?.length) continue
if (key === 'tags' && !abMetadata.tags?.length) continue
if (key === 'chapters' && !abMetadata.chapters?.length) continue
@@ -3,8 +3,8 @@ const Path = require('path')
const sequelize = require('sequelize')
const { LogLevel } = require('../utils/constants')
const { getTitleIgnorePrefix, areEquivalent } = require('../utils/index')
const abmetadataGenerator = require('../utils/generators/abmetadataGenerator')
const parseNameString = require('../utils/parsers/parseNameString')
const parseEbookMetadata = require('../utils/parsers/parseEbookMetadata')
const globals = require('../utils/globals')
const AudioFileScanner = require('./AudioFileScanner')
const Database = require('../Database')

@@ -170,7 +170,9 @@ class BookScanner {
hasMediaChanges = true
}

const bookMetadata = await this.getBookMetadataFromScanData(media.audioFiles, libraryItemData, libraryScan, librarySettings, existingLibraryItem.id)
const ebookFileScanData = await parseEbookMetadata.parse(media.ebookFile)

const bookMetadata = await this.getBookMetadataFromScanData(media.audioFiles, ebookFileScanData, libraryItemData, libraryScan, librarySettings, existingLibraryItem.id)
let authorsUpdated = false
const bookAuthorsRemoved = []
let seriesUpdated = false

@@ -317,24 +319,34 @@ class BookScanner {
})
}

// If no cover then extract cover from audio file if available OR search for cover if enabled in server settings
// If no cover then extract cover from audio file OR from ebook
const libraryItemDir = existingLibraryItem.isFile ? null : existingLibraryItem.path
if (!media.coverPath) {
const libraryItemDir = existingLibraryItem.isFile ? null : existingLibraryItem.path
const extractedCoverPath = await CoverManager.saveEmbeddedCoverArt(media.audioFiles, existingLibraryItem.id, libraryItemDir)
let extractedCoverPath = await CoverManager.saveEmbeddedCoverArt(media.audioFiles, existingLibraryItem.id, libraryItemDir)
if (extractedCoverPath) {
libraryScan.addLog(LogLevel.DEBUG, `Updating book "${bookMetadata.title}" extracted embedded cover art from audio file to path "${extractedCoverPath}"`)
media.coverPath = extractedCoverPath
hasMediaChanges = true
} else if (Database.serverSettings.scannerFindCovers) {
const authorName = media.authors.map(au => au.name).filter(au => au).join(', ')
const coverPath = await this.searchForCover(existingLibraryItem.id, libraryItemDir, media.title, authorName, libraryScan)
if (coverPath) {
media.coverPath = coverPath
} else if (ebookFileScanData?.ebookCoverPath) {
extractedCoverPath = await CoverManager.saveEbookCoverArt(ebookFileScanData, existingLibraryItem.id, libraryItemDir)
if (extractedCoverPath) {
libraryScan.addLog(LogLevel.DEBUG, `Updating book "${bookMetadata.title}" extracted embedded cover art from ebook file to path "${extractedCoverPath}"`)
media.coverPath = extractedCoverPath
hasMediaChanges = true
}
}
}

// If no cover then search for cover if enabled in server settings
if (!media.coverPath && Database.serverSettings.scannerFindCovers) {
const authorName = media.authors.map(au => au.name).filter(au => au).join(', ')
const coverPath = await this.searchForCover(existingLibraryItem.id, libraryItemDir, media.title, authorName, libraryScan)
if (coverPath) {
media.coverPath = coverPath
hasMediaChanges = true
}
}

existingLibraryItem.media = media

let libraryItemUpdated = false
@@ -408,12 +420,14 @@ class BookScanner {
return null
}

let ebookFileScanData = null
if (ebookLibraryFile) {
ebookLibraryFile = ebookLibraryFile.toJSON()
ebookLibraryFile.ebookFormat = ebookLibraryFile.metadata.ext.slice(1).toLowerCase()
ebookFileScanData = await parseEbookMetadata.parse(ebookLibraryFile)
}

const bookMetadata = await this.getBookMetadataFromScanData(scannedAudioFiles, libraryItemData, libraryScan, librarySettings)
const bookMetadata = await this.getBookMetadataFromScanData(scannedAudioFiles, ebookFileScanData, libraryItemData, libraryScan, librarySettings)
bookMetadata.explicit = !!bookMetadata.explicit // Ensure boolean
bookMetadata.abridged = !!bookMetadata.abridged // Ensure boolean

@@ -481,19 +495,28 @@ class BookScanner {
}
}

// If cover was not found in folder then check embedded covers in audio files OR search for cover
// If cover was not found in folder then check embedded covers in audio files OR ebook file
const libraryItemDir = libraryItemObj.isFile ? null : libraryItemObj.path
if (!bookObject.coverPath) {
const libraryItemDir = libraryItemObj.isFile ? null : libraryItemObj.path
// Extract and save embedded cover art
const extractedCoverPath = await CoverManager.saveEmbeddedCoverArt(scannedAudioFiles, libraryItemObj.id, libraryItemDir)
let extractedCoverPath = await CoverManager.saveEmbeddedCoverArt(scannedAudioFiles, libraryItemObj.id, libraryItemDir)
if (extractedCoverPath) {
libraryScan.addLog(LogLevel.DEBUG, `Extracted embedded cover from audio file at "${extractedCoverPath}" for book "${bookObject.title}"`)
bookObject.coverPath = extractedCoverPath
} else if (Database.serverSettings.scannerFindCovers) {
const authorName = bookMetadata.authors.join(', ')
bookObject.coverPath = await this.searchForCover(libraryItemObj.id, libraryItemDir, bookObject.title, authorName, libraryScan)
} else if (ebookFileScanData?.ebookCoverPath) {
extractedCoverPath = await CoverManager.saveEbookCoverArt(ebookFileScanData, libraryItemObj.id, libraryItemDir)
if (extractedCoverPath) {
libraryScan.addLog(LogLevel.DEBUG, `Extracted embedded cover from ebook file at "${extractedCoverPath}" for book "${bookObject.title}"`)
bookObject.coverPath = extractedCoverPath
}
}
}

// If cover not found then search for cover if enabled in settings
if (!bookObject.coverPath && Database.serverSettings.scannerFindCovers) {
const authorName = bookMetadata.authors.join(', ')
bookObject.coverPath = await this.searchForCover(libraryItemObj.id, libraryItemDir, bookObject.title, authorName, libraryScan)
}

libraryItemObj.book = bookObject
const libraryItem = await Database.libraryItemModel.create(libraryItemObj, {
include: {
@@ -570,13 +593,14 @@ class BookScanner {
/**
*
* @param {import('../models/Book').AudioFileObject[]} audioFiles
* @param {import('../utils/parsers/parseEbookMetadata').EBookFileScanData} ebookFileScanData
* @param {import('./LibraryItemScanData')} libraryItemData
* @param {LibraryScan} libraryScan
* @param {import('../models/Library').LibrarySettingsObject} librarySettings
* @param {string} [existingLibraryItemId]
* @returns {Promise<BookMetadataObject>}
*/
async getBookMetadataFromScanData(audioFiles, libraryItemData, libraryScan, librarySettings, existingLibraryItemId = null) {
async getBookMetadataFromScanData(audioFiles, ebookFileScanData, libraryItemData, libraryScan, librarySettings, existingLibraryItemId = null) {
// First set book metadata from folder/file names
const bookMetadata = {
title: libraryItemData.mediaMetadata.title, // required

@@ -599,7 +623,7 @@ class BookScanner {
coverPath: undefined
}

const bookMetadataSourceHandler = new BookScanner.BookMetadataSourceHandler(bookMetadata, audioFiles, libraryItemData, libraryScan, existingLibraryItemId)
const bookMetadataSourceHandler = new BookScanner.BookMetadataSourceHandler(bookMetadata, audioFiles, ebookFileScanData, libraryItemData, libraryScan, existingLibraryItemId)
const metadataPrecedence = librarySettings.metadataPrecedence || ['folderStructure', 'audioMetatags', 'nfoFile', 'txtFiles', 'opfFile', 'absMetadata']
libraryScan.addLog(LogLevel.DEBUG, `"${bookMetadata.title}" Getting metadata with precedence [${metadataPrecedence.join(', ')}]`)
for (const metadataSource of metadataPrecedence) {

@@ -627,13 +651,15 @@ class BookScanner {
*
* @param {Object} bookMetadata
* @param {import('../models/Book').AudioFileObject[]} audioFiles
* @param {import('../utils/parsers/parseEbookMetadata').EBookFileScanData} ebookFileScanData
* @param {import('./LibraryItemScanData')} libraryItemData
* @param {LibraryScan} libraryScan
* @param {string} existingLibraryItemId
*/
constructor(bookMetadata, audioFiles, libraryItemData, libraryScan, existingLibraryItemId) {
constructor(bookMetadata, audioFiles, ebookFileScanData, libraryItemData, libraryScan, existingLibraryItemId) {
this.bookMetadata = bookMetadata
this.audioFiles = audioFiles
this.ebookFileScanData = ebookFileScanData
this.libraryItemData = libraryItemData
this.libraryScan = libraryScan
this.existingLibraryItemId = existingLibraryItemId

@@ -647,13 +673,42 @@ class BookScanner {
}

/**
* Metadata from audio file meta tags
* Metadata from audio file meta tags OR metadata from ebook file
*/
audioMetatags() {
if (!this.audioFiles.length) return
// Modifies bookMetadata with metadata mapped from audio file meta tags
const bookTitle = this.bookMetadata.title || this.libraryItemData.mediaMetadata.title
AudioFileScanner.setBookMetadataFromAudioMetaTags(bookTitle, this.audioFiles, this.bookMetadata, this.libraryScan)
if (this.audioFiles.length) {
// Modifies bookMetadata with metadata mapped from audio file meta tags
const bookTitle = this.bookMetadata.title || this.libraryItemData.mediaMetadata.title
AudioFileScanner.setBookMetadataFromAudioMetaTags(bookTitle, this.audioFiles, this.bookMetadata, this.libraryScan)
} else if (this.ebookFileScanData) {
const ebookMetdataObject = this.ebookFileScanData.metadata || {}
for (const key in ebookMetdataObject) {
if (key === 'tags') {
if (ebookMetdataObject.tags.length) {
this.bookMetadata.tags = ebookMetdataObject.tags
}
} else if (key === 'genres') {
if (ebookMetdataObject.genres.length) {
this.bookMetadata.genres = ebookMetdataObject.genres
}
} else if (key === 'authors') {
if (ebookMetdataObject.authors?.length) {
this.bookMetadata.authors = ebookMetdataObject.authors
}
} else if (key === 'narrators') {
if (ebookMetdataObject.narrators?.length) {
this.bookMetadata.narrators = ebookMetdataObject.narrators
}
} else if (key === 'series') {
if (ebookMetdataObject.series?.length) {
this.bookMetadata.series = ebookMetdataObject.series
}
} else if (ebookMetdataObject[key] && key !== 'sequence') {
this.bookMetadata[key] = ebookMetdataObject[key]
}
}
}
return null
}

/**
@@ -134,10 +134,13 @@ class LibraryScan {
}

async saveLog() {
await Logger.logManager.ensureScanLogDir()
const scanLogDir = Path.join(global.MetadataPath, 'logs', 'scans')

const logDir = Path.join(global.MetadataPath, 'logs', 'scans')
const outputPath = Path.join(logDir, this.logFilename)
if (!(await fs.pathExists(scanLogDir))) {
await fs.mkdir(scanLogDir)
}

const outputPath = Path.join(scanLogDir, this.logFilename)
const logLines = [JSON.stringify(this.toJSON())]
this.logs.forEach(l => {
logLines.push(JSON.stringify(l))
@@ -2,7 +2,6 @@ const uuidv4 = require("uuid").v4
const Path = require('path')
const { LogLevel } = require('../utils/constants')
const { getTitleIgnorePrefix } = require('../utils/index')
const abmetadataGenerator = require('../utils/generators/abmetadataGenerator')
const AudioFileScanner = require('./AudioFileScanner')
const Database = require('../Database')
const { filePathToPOSIX, getFileTimestampsWithIno } = require('../utils/fileUtils')
@@ -101,8 +101,8 @@ module.exports.downloadPodcastEpisode = (podcastEpisodeDownload) => {
})
if (!response) return resolve(false)


const ffmpeg = Ffmpeg(response.data)
ffmpeg.addOption('-loglevel debug') // Debug logs printed on error
ffmpeg.outputOptions(
'-c', 'copy',
'-metadata', 'podcast=1'

@@ -110,6 +110,7 @@ module.exports.downloadPodcastEpisode = (podcastEpisodeDownload) => {

const podcastMetadata = podcastEpisodeDownload.libraryItem.media.metadata
const podcastEpisode = podcastEpisodeDownload.podcastEpisode
const finalSizeInBytes = Number(podcastEpisode.enclosure?.length || 0)

const taggings = {
'album': podcastMetadata.title,

@@ -147,13 +148,30 @@ module.exports.downloadPodcastEpisode = (podcastEpisodeDownload) => {

ffmpeg.addOutput(podcastEpisodeDownload.targetPath)

const stderrLines = []
ffmpeg.on('stderr', (stderrLine) => {
if (typeof stderrLine === 'string') {
stderrLines.push(stderrLine)
}
})
ffmpeg.on('start', (cmd) => {
Logger.debug(`[FfmpegHelpers] downloadPodcastEpisode: Cmd: ${cmd}`)
})
ffmpeg.on('error', (err, stdout, stderr) => {
Logger.error(`[FfmpegHelpers] downloadPodcastEpisode: Error ${err} ${stdout} ${stderr}`)
ffmpeg.on('error', (err) => {
Logger.error(`[FfmpegHelpers] downloadPodcastEpisode: Error ${err}`)
if (stderrLines.length) {
Logger.error(`Full stderr dump for episode url "${podcastEpisodeDownload.url}": ${stderrLines.join('\n')}`)
}
resolve(false)
})
ffmpeg.on('progress', (progress) => {
let progressPercent = 0
if (finalSizeInBytes && progress.targetSize && !isNaN(progress.targetSize)) {
const finalSizeInKb = Math.floor(finalSizeInBytes / 1000)
progressPercent = Math.min(1, progress.targetSize / finalSizeInKb) * 100
}
Logger.debug(`[FfmpegHelpers] downloadPodcastEpisode: Progress estimate ${progressPercent.toFixed(0)}% (${progress?.targetSize || 'N/A'} KB) for "${podcastEpisodeDownload.url}"`)
})
ffmpeg.on('end', () => {
Logger.debug(`[FfmpegHelpers] downloadPodcastEpisode: Complete`)
resolve(podcastEpisodeDownload.targetPath)
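The new progress estimate above compares ffmpeg's reported targetSize (kilobytes written so far) against the expected final size from the RSS enclosure (bytes), hence the divide-by-1000 and the Math.min clamp. A worked example with illustrative values:

// Worked example of the progress arithmetic (values are illustrative)
const finalSizeInBytes = 52400000          // from the RSS enclosure length
const targetSizeKb = 26200                 // ffmpeg 'progress' event reports KB written
const finalSizeInKb = Math.floor(finalSizeInBytes / 1000)
const progressPercent = Math.min(1, targetSizeKb / finalSizeInKb) * 100
console.log(progressPercent.toFixed(0) + '%') // "50%"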
@@ -1,4 +1,5 @@
const xml = require('../../libs/xml')
const escapeForXML = require('../../libs/xml/escapeForXML')

/**
* Generate OPML file string for podcasts in a library

@@ -12,18 +13,18 @@ module.exports.generate = (podcasts, indent = true) => {
if (!podcast.feedURL) return
const feedAttributes = {
type: 'rss',
text: podcast.title,
title: podcast.title,
xmlUrl: podcast.feedURL
text: escapeForXML(podcast.title),
title: escapeForXML(podcast.title),
xmlUrl: escapeForXML(podcast.feedURL)
}
if (podcast.description) {
feedAttributes.description = podcast.description
feedAttributes.description = escapeForXML(podcast.description)
}
if (podcast.itunesPageUrl) {
feedAttributes.htmlUrl = podcast.itunesPageUrl
feedAttributes.htmlUrl = escapeForXML(podcast.itunesPageUrl)
}
if (podcast.language) {
feedAttributes.language = podcast.language
feedAttributes.language = escapeForXML(podcast.language)
}
bodyItems.push({
outline: {
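The escaping added above matters because podcast titles and URLs can contain characters that are significant in XML attributes. A small illustration; escapeForXML is the library helper required in the diff, and the exact escaped output shown is an assumption:

const escapeForXML = require('../../libs/xml/escapeForXML')

const title = 'Tom & Jerry <Live>'
// Unescaped, the & and < would produce malformed OPML attribute values
console.log(escapeForXML(title)) // e.g. 'Tom &amp; Jerry &lt;Live&gt;'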
server/utils/parsers/parseComicInfoMetadata.js (new file, 35 lines)

@@ -0,0 +1,35 @@

/**
* TODO: Add more fields
* @see https://anansi-project.github.io/docs/comicinfo/intro
*
* @param {Object} comicInfoJson
* @returns {import('../../scanner/BookScanner').BookMetadataObject}
*/
module.exports.parse = (comicInfoJson) => {
if (!comicInfoJson?.ComicInfo) return null

const ComicSeries = comicInfoJson.ComicInfo.Series?.[0]?.trim() || null
const ComicNumber = comicInfoJson.ComicInfo.Number?.[0]?.trim() || null
const ComicSummary = comicInfoJson.ComicInfo.Summary?.[0]?.trim() || null

let title = null
const series = []
if (ComicSeries) {
series.push({
name: ComicSeries,
sequence: ComicNumber
})

title = ComicSeries
if (ComicNumber) {
title += ` ${ComicNumber}`
}
}

return {
title,
series,
description: ComicSummary
}
}
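A small usage sketch for the new parser. The input mirrors xmlToJSON output, where each element arrives as an array (hence the `?.[0]` access above); the series name and values are illustrative:

const parseComicInfoMetadata = require('./parseComicInfoMetadata')

const comicInfoJson = {
  ComicInfo: {
    Series: ['Example Series'],
    Number: ['7'],
    Summary: ['An example summary.']
  }
}

console.log(parseComicInfoMetadata.parse(comicInfoJson))
// { title: 'Example Series 7', series: [{ name: 'Example Series', sequence: '7' }], description: 'An example summary.' }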
server/utils/parsers/parseComicMetadata.js (new file, 109 lines)

@@ -0,0 +1,109 @@
const Path = require('path')
const globals = require('../globals')
const fs = require('../../libs/fsExtra')
const Logger = require('../../Logger')
const Archive = require('../../libs/libarchive/archive')
const { xmlToJSON } = require('../index')
const parseComicInfoMetadata = require('./parseComicInfoMetadata')

/**
*
* @param {string} filepath
* @returns {Promise<Buffer>}
*/
async function getComicFileBuffer(filepath) {
if (!await fs.pathExists(filepath)) {
Logger.error(`Comic path does not exist "${filepath}"`)
return null
}
try {
return fs.readFile(filepath)
} catch (error) {
Logger.error(`Failed to read comic at "${filepath}"`, error)
return null
}
}

/**
* Extract cover image from comic return true if success
*
* @param {string} comicPath
* @param {string} comicImageFilepath
* @param {string} outputCoverPath
* @returns {Promise<boolean>}
*/
async function extractCoverImage(comicPath, comicImageFilepath, outputCoverPath) {
const comicFileBuffer = await getComicFileBuffer(comicPath)
if (!comicFileBuffer) return null

const archive = await Archive.open(comicFileBuffer)
const fileEntry = await archive.extractSingleFile(comicImageFilepath)

if (!fileEntry?.fileData) {
Logger.error(`[parseComicMetadata] Invalid file entry data for comicPath "${comicPath}"/${comicImageFilepath}`)
return false
}

try {
await fs.writeFile(outputCoverPath, fileEntry.fileData)
return true
} catch (error) {
Logger.error(`[parseComicMetadata] Failed to extract image from comicPath "${comicPath}"`, error)
return false
}
}
module.exports.extractCoverImage = extractCoverImage

/**
* Parse metadata from comic
*
* @param {import('../../models/Book').EBookFileObject} ebookFile
* @returns {Promise<import('./parseEbookMetadata').EBookFileScanData>}
*/
async function parse(ebookFile) {
const comicPath = ebookFile.metadata.path
Logger.debug(`Parsing metadata from comic at "${comicPath}"`)

const comicFileBuffer = await getComicFileBuffer(comicPath)
if (!comicFileBuffer) return null

const archive = await Archive.open(comicFileBuffer)

const fileObjects = await archive.getFilesArray()

fileObjects.sort((a, b) => {
return a.file.name.localeCompare(b.file.name, undefined, {
numeric: true,
sensitivity: 'base'
})
})

let metadata = null
const comicInfo = fileObjects.find(fo => fo.file.name === 'ComicInfo.xml')
if (comicInfo) {
const comicInfoEntry = await comicInfo.file.extract()
if (comicInfoEntry?.fileData) {
const comicInfoStr = new TextDecoder().decode(comicInfoEntry.fileData)
const comicInfoJson = await xmlToJSON(comicInfoStr)
if (comicInfoJson) {
metadata = parseComicInfoMetadata.parse(comicInfoJson)
}
}
}

const payload = {
path: comicPath,
ebookFormat: ebookFile.ebookFormat,
metadata
}

const firstImage = fileObjects.find(fo => globals.SupportedImageTypes.includes(Path.extname(fo.file.name).toLowerCase().slice(1)))
if (firstImage?.file?._path) {
payload.ebookCoverPath = firstImage.file._path
} else {
Logger.warn(`Cover image not found in comic at "${comicPath}"`)
}

return payload
}
module.exports.parse = parse
server/utils/parsers/parseEbookMetadata.js (new file, 47 lines)

@@ -0,0 +1,47 @@
const parseEpubMetadata = require('./parseEpubMetadata')
const parseComicMetadata = require('./parseComicMetadata')

/**
* @typedef EBookFileScanData
* @property {string} path
* @property {string} ebookFormat
* @property {string} ebookCoverPath internal image path
* @property {import('../../scanner/BookScanner').BookMetadataObject} metadata
*/

/**
* Parse metadata from ebook file
*
* @param {import('../../models/Book').EBookFileObject} ebookFile
* @returns {Promise<EBookFileScanData>}
*/
async function parse(ebookFile) {
if (!ebookFile) return null

if (ebookFile.ebookFormat === 'epub') {
return parseEpubMetadata.parse(ebookFile)
} else if (['cbz', 'cbr'].includes(ebookFile.ebookFormat)) {
return parseComicMetadata.parse(ebookFile)
}
return null
}
module.exports.parse = parse

/**
* Extract cover from ebook file
*
* @param {EBookFileScanData} ebookFileScanData
* @param {string} outputCoverPath
* @returns {Promise<boolean>}
*/
async function extractCoverImage(ebookFileScanData, outputCoverPath) {
if (!ebookFileScanData?.ebookCoverPath) return false

if (ebookFileScanData.ebookFormat === 'epub') {
return parseEpubMetadata.extractCoverImage(ebookFileScanData.path, ebookFileScanData.ebookCoverPath, outputCoverPath)
} else if (['cbz', 'cbr'].includes(ebookFileScanData.ebookFormat)) {
return parseComicMetadata.extractCoverImage(ebookFileScanData.path, ebookFileScanData.ebookCoverPath, outputCoverPath)
}
return false
}
module.exports.extractCoverImage = extractCoverImage
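This module is a thin dispatcher keyed on ebookFormat (epub vs cbz/cbr). A usage sketch, assuming an ebookFile object shaped like the model's EBookFileObject; the path is illustrative:

const parseEbookMetadata = require('./parseEbookMetadata')

async function run() {
  const ebookFile = {
    ebookFormat: 'epub',
    metadata: { path: '/audiobooks/Example Book/book.epub' }
  }
  const scanData = await parseEbookMetadata.parse(ebookFile)
  if (scanData?.metadata) {
    // EBookFileScanData: parsed metadata plus the archive-internal cover path, if found
    console.log(scanData.metadata.title, scanData.ebookCoverPath)
  }
}
run()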
server/utils/parsers/parseEpubMetadata.js (new file, 110 lines)

@@ -0,0 +1,110 @@
const Path = require('path')
const Logger = require('../../Logger')
const StreamZip = require('../../libs/nodeStreamZip')
const parseOpfMetadata = require('./parseOpfMetadata')
const { xmlToJSON } = require('../index')


/**
* Extract file from epub and return string content
*
* @param {string} epubPath
* @param {string} filepath
* @returns {Promise<string>}
*/
async function extractFileFromEpub(epubPath, filepath) {
const zip = new StreamZip.async({ file: epubPath })
const data = await zip.entryData(filepath).catch((error) => {
Logger.error(`[parseEpubMetadata] Failed to extract ${filepath} from epub at "${epubPath}"`, error)
})
const filedata = data?.toString('utf8')
await zip.close()
return filedata
}

/**
* Extract an XML file from epub and return JSON
*
* @param {string} epubPath
* @param {string} xmlFilepath
* @returns {Promise<Object>}
*/
async function extractXmlToJson(epubPath, xmlFilepath) {
const filedata = await extractFileFromEpub(epubPath, xmlFilepath)
if (!filedata) return null
return xmlToJSON(filedata)
}

/**
* Extract cover image from epub return true if success
*
* @param {string} epubPath
* @param {string} epubImageFilepath
* @param {string} outputCoverPath
* @returns {Promise<boolean>}
*/
async function extractCoverImage(epubPath, epubImageFilepath, outputCoverPath) {
const zip = new StreamZip.async({ file: epubPath })

const success = await zip.extract(epubImageFilepath, outputCoverPath).then(() => true).catch((error) => {
Logger.error(`[parseEpubMetadata] Failed to extract image ${epubImageFilepath} from epub at "${epubPath}"`, error)
return false
})

await zip.close()

return success
}
module.exports.extractCoverImage = extractCoverImage

/**
* Parse metadata from epub
*
* @param {import('../../models/Book').EBookFileObject} ebookFile
* @returns {Promise<import('./parseEbookMetadata').EBookFileScanData>}
*/
async function parse(ebookFile) {
const epubPath = ebookFile.metadata.path
Logger.debug(`Parsing metadata from epub at "${epubPath}"`)
// Entrypoint of the epub that contains the filepath to the package document (opf file)
const containerJson = await extractXmlToJson(epubPath, 'META-INF/container.xml')

// Get package document opf filepath from container.xml
const packageDocPath = containerJson?.container?.rootfiles?.[0]?.rootfile?.[0]?.$?.['full-path']
if (!packageDocPath) {
Logger.error(`Failed to get package doc path in Container.xml`, JSON.stringify(containerJson, null, 2))
return null
}

// Extract package document to JSON
const packageJson = await extractXmlToJson(epubPath, packageDocPath)
if (!packageJson) {
return null
}

// Parse metadata from package document opf file
const opfMetadata = parseOpfMetadata.parseOpfMetadataJson(packageJson)
if (!opfMetadata) {
Logger.error(`Unable to parse metadata in package doc with json`, JSON.stringify(packageJson, null, 2))
return null
}

const payload = {
path: epubPath,
ebookFormat: 'epub',
metadata: opfMetadata
}

// Attempt to find filepath to cover image
const manifestFirstImage = packageJson.package?.manifest?.[0]?.item?.find(item => item.$?.['media-type']?.startsWith('image/'))
let coverImagePath = manifestFirstImage?.$?.href
if (coverImagePath) {
const packageDirname = Path.dirname(packageDocPath)
payload.ebookCoverPath = Path.posix.join(packageDirname, coverImagePath)
} else {
Logger.warn(`Cover image not found in manifest for epub at "${epubPath}"`)
}

return payload
}
module.exports.parse = parse
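The cover path resolution above hinges on two facts: container.xml points at the OPF package document, and manifest hrefs are relative to that OPF. Archive-internal paths are always POSIX-style, so Path.posix.join is used regardless of host OS. A small sketch with illustrative paths:

const Path = require('path')

const packageDocPath = 'OEBPS/content.opf'   // from META-INF/container.xml
const coverHref = 'images/cover.jpg'         // manifest item href, relative to the opf
const ebookCoverPath = Path.posix.join(Path.dirname(packageDocPath), coverHref)
console.log(ebookCoverPath) // 'OEBPS/images/cover.jpg'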
@@ -103,15 +103,24 @@ function fetchSeries(metadataMeta) {
  if (!metadataMeta) return []
  const result = []
  for (let i = 0; i < metadataMeta.length; i++) {
    if (metadataMeta[i].$?.name === "calibre:series" && metadataMeta[i].$.content?.trim()) {
    if (metadataMeta[i].$?.name === 'calibre:series' && metadataMeta[i].$.content?.trim()) {
      const name = metadataMeta[i].$.content.trim()
      let sequence = null
      if (metadataMeta[i + 1]?.$?.name === "calibre:series_index" && metadataMeta[i + 1].$?.content?.trim()) {
      if (metadataMeta[i + 1]?.$?.name === 'calibre:series_index' && metadataMeta[i + 1].$?.content?.trim()) {
        sequence = metadataMeta[i + 1].$.content.trim()
      }
      result.push({ name, sequence })
    }
  }

  // If exactly one series was found without a series_index, look for any series_index meta elsewhere.
  // This supports OPFs where calibre:series_index is not directly beneath calibre:series.
  if (result.length === 1 && !result[0].sequence) {
    const seriesIndexMeta = metadataMeta.find(m => m.$?.name === 'calibre:series_index' && m.$.content?.trim())
    if (seriesIndexMeta) {
      result[0].sequence = seriesIndexMeta.$.content.trim()
    }
  }
  return result
}
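To see the new fallback concretely, here is an invented xml2js-shaped `<meta>` array where `calibre:series_index` is not adjacent to `calibre:series` — the case the added block above now handles:

// Invented sample input (xml2js convention: attributes under `$`)
const metadataMeta = [
  { $: { name: 'calibre:series', content: 'The Expanse' } },
  { $: { name: 'calibre:title_sort', content: 'Leviathan Wakes' } },
  { $: { name: 'calibre:series_index', content: '1.0' } }
]
// The adjacency check misses the index, so sequence starts out null;
// the single-series fallback then finds it:
// fetchSeries(metadataMeta) => [{ name: 'The Expanse', sequence: '1.0' }]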

@@ -136,11 +145,7 @@ function stripPrefix(str) {
  return str.split(':').pop()
}

module.exports.parseOpfMetadataXML = async (xml) => {
  const json = await xmlToJSON(xml)

  if (!json) return null

module.exports.parseOpfMetadataJson = (json) => {
  // Handle <package ...> or with prefix <ns0:package ...>
  const packageKey = Object.keys(json).find(key => stripPrefix(key) === 'package')
  if (!packageKey) return null

@@ -167,7 +172,7 @@ module.exports.parseOpfMetadataXML = async (xml) => {
  const creators = parseCreators(metadata)
  const authors = (fetchCreators(creators, 'aut') || []).map(au => au?.trim()).filter(au => au)
  const narrators = (fetchNarrators(creators, metadata) || []).map(nrt => nrt?.trim()).filter(nrt => nrt)
  const data = {
  return {
    title: fetchTitle(metadata),
    subtitle: fetchSubtitle(metadata),
    authors,

@@ -182,5 +187,10 @@ module.exports.parseOpfMetadataXML = async (xml) => {
    series: fetchSeries(metadataMeta),
    tags: fetchTags(metadata)
  }
  return data
}

module.exports.parseOpfMetadataXML = async (xml) => {
  const json = await xmlToJSON(xml)
  if (!json) return null
  return this.parseOpfMetadataJson(json)
}
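The refactor splits the old XML-only entry point in two, so a caller that already holds the package document as JSON (as the epub parser above now does) can skip re-serializing to XML. A minimal sketch, with the require path assumed:

const parseOpfMetadata = require('./parseOpfMetadata') // path assumed

async function example(packageJson, opfXmlString) {
  // Already-parsed JSON, e.g. from extractXmlToJson in parseEpubMetadata:
  const fromJson = parseOpfMetadata.parseOpfMetadataJson(packageJson)
  // Raw OPF XML string, as before:
  const fromXml = await parseOpfMetadata.parseOpfMetadataXML(opfXmlString)
  return { fromJson, fromXml }
}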

@@ -233,7 +233,7 @@ module.exports.getPodcastFeed = (feedUrl, excludeEpisodeMetadata = false) => {
    method: 'GET',
    timeout: 12000,
    responseType: 'arraybuffer',
    headers: { Accept: 'application/rss+xml, application/xhtml+xml, application/xml' },
    headers: { Accept: 'application/rss+xml, application/xhtml+xml, application/xml, */*;q=0.8' },
    httpAgent: ssrfFilter(feedUrl),
    httpsAgent: ssrfFilter(feedUrl)
  }).then(async (data) => {
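The only change in this hunk is the trailing `*/*;q=0.8` in the Accept header: feeds served with a non-XML content type are still accepted, while the XML variants stay preferred. A standalone sketch of the request config, assuming axios and that ssrfFilter comes from the ssrf-req-filter package (which maps a URL to an SSRF-guarding agent):

const axios = require('axios')
const ssrfFilter = require('ssrf-req-filter') // assumed source of ssrfFilter()

function fetchFeedXml(feedUrl) {
  return axios({
    url: feedUrl,
    method: 'GET',
    timeout: 12000,
    responseType: 'arraybuffer',
    // Prefer RSS/XML, but accept anything else at low priority
    headers: { Accept: 'application/rss+xml, application/xhtml+xml, application/xml, */*;q=0.8' },
    httpAgent: ssrfFilter(feedUrl),
    httpsAgent: ssrfFilter(feedUrl)
  })
}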

@@ -1,9 +0,0 @@
const { parentPort } = require("worker_threads")
const prober = require('./prober')

parentPort.on("message", async ({ mediaPath }) => {
  const results = await prober.probe(mediaPath)
  parentPort.postMessage({
    data: results,
  })
})
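With this worker wrapper deleted, callers presumably invoke the prober in-process. A minimal sketch of the equivalent direct call, grounded only in the removed file's own usage:

const prober = require('./prober') // same module the worker required

async function probeMedia(mediaPath) {
  // Same call the worker made, minus the worker_threads round trip
  return prober.probe(mediaPath)
}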
@@ -110,7 +110,7 @@ module.exports = {
      })

      // Filter out bad genres like "audiobook" and "audio book"
      const genres = (ls.mediaMetadata.genres || []).filter(g => !g.toLowerCase().includes('audiobook') && !g.toLowerCase().includes('audio book'))
      const genres = (ls.mediaMetadata.genres || []).filter(g => g && !g.toLowerCase().includes('audiobook') && !g.toLowerCase().includes('audio book'))
      genres.forEach((genre) => {
        if (!genreListeningMap[genre]) genreListeningMap[genre] = 0
        genreListeningMap[genre] += (ls.timeListening || 0)
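This hunk and the identical one below add the same `g &&` guard, so a null or empty genre entry no longer throws on `toLowerCase()`. A self-contained sketch of the behavior, with invented sample data:

// Invented sample data demonstrating the guard:
const rawGenres = ['Sci-Fi', null, 'Audiobook', '', 'History']
const genres = rawGenres.filter(g => g && !g.toLowerCase().includes('audiobook') && !g.toLowerCase().includes('audio book'))
console.log(genres) // ['Sci-Fi', 'History']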

@@ -141,7 +141,7 @@ module.exports = {
      })

      // Filter out bad genres like "audiobook" and "audio book"
      const genres = (ls.mediaMetadata.genres || []).filter(g => !g.toLowerCase().includes('audiobook') && !g.toLowerCase().includes('audio book'))
      const genres = (ls.mediaMetadata.genres || []).filter(g => g && !g.toLowerCase().includes('audiobook') && !g.toLowerCase().includes('audio book'))
      genres.forEach((genre) => {
        if (!genreListeningMap[genre]) genreListeningMap[genre] = 0
        genreListeningMap[genre] += listeningSessionListeningTime