mirror of https://github.com/advplyr/audiobookshelf.git (synced 2026-02-03 16:59:41 +00:00)

chore: merge master
This commit is contained in: 79c34d0638
217 changed files with 15818 additions and 7758 deletions

server/Auth.js | 766

@@ -1,32 +1,581 @@
const axios = require('axios')
const passport = require('passport')
const bcrypt = require('./libs/bcryptjs')
const jwt = require('./libs/jsonwebtoken')
const requestIp = require('./libs/requestIp')
const Logger = require('./Logger')
const LocalStrategy = require('./libs/passportLocal')
const JwtStrategy = require('passport-jwt').Strategy
const ExtractJwt = require('passport-jwt').ExtractJwt
const OpenIDClient = require('openid-client')
const Database = require('./Database')
const Logger = require('./Logger')
const e = require('express')

/**
 * @class Class for handling all the authentication related functionality.
 */
class Auth {
  constructor() { }

  cors(req, res, next) {
    res.header('Access-Control-Allow-Origin', '*')
    res.header("Access-Control-Allow-Methods", 'GET, POST, PATCH, PUT, DELETE, OPTIONS')
    res.header('Access-Control-Allow-Headers', '*')
    // TODO: Make sure allowing all headers is not a security concern. It is required for adding custom headers for SSO
    // res.header("Access-Control-Allow-Headers", "Origin, X-Requested-With, Content-Type, Accept, Accept-Encoding, Range, Authorization")
    res.header('Access-Control-Allow-Credentials', true)
    if (req.method === 'OPTIONS') {
      res.sendStatus(200)

  constructor() {
    // Map of openId sessions indexed by oauth2 state-variable
    this.openIdAuthSession = new Map()
  }

  /**
   * Initializes all passport.js strategies and other passport.js related initialization.
   */
  async initPassportJs() {
    // Check if we should load the local strategy (username + password login)
    if (global.ServerSettings.authActiveAuthMethods.includes("local")) {
      this.initAuthStrategyPassword()
    }

    // Check if we should load the openid strategy
    if (global.ServerSettings.authActiveAuthMethods.includes("openid")) {
      this.initAuthStrategyOpenID()
    }

    // Load the JwtStrategy (always) -> for bearer token auth
    passport.use(new JwtStrategy({
      jwtFromRequest: ExtractJwt.fromExtractors([ExtractJwt.fromAuthHeaderAsBearerToken(), ExtractJwt.fromUrlQueryParameter('token')]),
      secretOrKey: Database.serverSettings.tokenSecret
    }, this.jwtAuthCheck.bind(this)))

    // define how to serialize a user (to be put into the session)
    passport.serializeUser(function (user, cb) {
      process.nextTick(function () {
        // only store id to session
        return cb(null, JSON.stringify({
          id: user.id,
        }))
      })
    })

    // define how to deserialize a user (use the ID to get it from the database)
    passport.deserializeUser((function (user, cb) {
      process.nextTick((async function () {
        const parsedUserInfo = JSON.parse(user)
        // load the user by ID that is stored in the session
        const dbUser = await Database.userModel.getUserById(parsedUserInfo.id)
        return cb(null, dbUser)
      }).bind(this))
    }).bind(this))
  }
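
The JwtStrategy registered above accepts the token from either extractor, so an API client can authenticate in two ways. A minimal sketch (host and endpoints are placeholders, not from this diff):

// Standard Authorization header
await fetch('https://abs.example.com/api/libraries', {
  headers: { Authorization: `Bearer ${token}` }
})
// Or as the "token" query parameter, which ExtractJwt.fromUrlQueryParameter('token')
// enables for contexts that cannot set headers (e.g. <audio>/<img> src URLs)
await fetch(`https://abs.example.com/api/items/${itemId}/cover?token=${token}`)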

  /**
   * Passport use LocalStrategy
   */
  initAuthStrategyPassword() {
    passport.use(new LocalStrategy(this.localAuthCheckUserPw.bind(this)))
  }

  /**
   * Passport use OpenIDClient.Strategy
   */
  initAuthStrategyOpenID() {
    if (!Database.serverSettings.isOpenIDAuthSettingsValid) {
      Logger.error(`[Auth] Cannot init openid auth strategy - invalid settings`)
      return
    }

    const openIdIssuerClient = new OpenIDClient.Issuer({
      issuer: global.ServerSettings.authOpenIDIssuerURL,
      authorization_endpoint: global.ServerSettings.authOpenIDAuthorizationURL,
      token_endpoint: global.ServerSettings.authOpenIDTokenURL,
      userinfo_endpoint: global.ServerSettings.authOpenIDUserInfoURL,
      jwks_uri: global.ServerSettings.authOpenIDJwksURL
    }).Client
    const openIdClient = new openIdIssuerClient({
      client_id: global.ServerSettings.authOpenIDClientID,
      client_secret: global.ServerSettings.authOpenIDClientSecret
    })
    passport.use('openid-client', new OpenIDClient.Strategy({
      client: openIdClient,
      params: {
        redirect_uri: '/auth/openid/callback',
        scope: 'openid profile email'
      }
    }, async (tokenset, userinfo, done) => {
      Logger.debug(`[Auth] openid callback userinfo=`, userinfo)

      let failureMessage = 'Unauthorized'
      if (!userinfo.sub) {
        Logger.error(`[Auth] openid callback invalid userinfo, no sub`)
        return done(null, null, failureMessage)
      }

      // First check for matching user by sub
      let user = await Database.userModel.getUserByOpenIDSub(userinfo.sub)
      if (!user) {
        // Optionally match existing by email or username based on server setting "authOpenIDMatchExistingBy"
        if (Database.serverSettings.authOpenIDMatchExistingBy === 'email' && userinfo.email && userinfo.email_verified) {
          Logger.info(`[Auth] openid: User not found, checking existing with email "${userinfo.email}"`)
          user = await Database.userModel.getUserByEmail(userinfo.email)
          // Check that user is not already matched
          if (user?.authOpenIDSub) {
            Logger.warn(`[Auth] openid: User found with email "${userinfo.email}" but is already matched with sub "${user.authOpenIDSub}"`)
            // TODO: Message isn't actually returned to the user yet. Need to override the passport authenticated callback
            failureMessage = 'A matching user was found but is already matched with another user from your auth provider'
            user = null
          }
        } else if (Database.serverSettings.authOpenIDMatchExistingBy === 'username' && userinfo.preferred_username) {
          Logger.info(`[Auth] openid: User not found, checking existing with username "${userinfo.preferred_username}"`)
          user = await Database.userModel.getUserByUsername(userinfo.preferred_username)
          // Check that user is not already matched
          if (user?.authOpenIDSub) {
            Logger.warn(`[Auth] openid: User found with username "${userinfo.preferred_username}" but is already matched with sub "${user.authOpenIDSub}"`)
            // TODO: Message isn't actually returned to the user yet. Need to override the passport authenticated callback
            failureMessage = 'A matching user was found but is already matched with another user from your auth provider'
            user = null
          }
        }

        // If existing user was matched and isActive then save sub to user
        if (user?.isActive) {
          Logger.info(`[Auth] openid: New user found matching existing user "${user.username}"`)
          user.authOpenIDSub = userinfo.sub
          await Database.userModel.updateFromOld(user)
        } else if (user && !user.isActive) {
          Logger.warn(`[Auth] openid: New user found matching existing user "${user.username}" but that user is deactivated`)
        }

        // Optionally auto register the user
        if (!user && Database.serverSettings.authOpenIDAutoRegister) {
          Logger.info(`[Auth] openid: Auto-registering user with sub "${userinfo.sub}"`, userinfo)
          user = await Database.userModel.createUserFromOpenIdUserInfo(userinfo, this)
        }
      }

      if (!user?.isActive) {
        if (user && !user.isActive) {
          failureMessage = 'Unauthorized'
        }
        // deny login
        done(null, null, failureMessage)
        return
      }

      // permit login
      return done(null, user)
    }))
  }
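
For reference, the settings consumed by initAuthStrategyOpenID map one-to-one onto the OIDC issuer metadata. A hypothetical configuration that would satisfy isOpenIDAuthSettingsValid (all values are illustrative; only the setting names come from the code above):

ServerSettings.authOpenIDIssuerURL = 'https://sso.example.com/realms/abs'
ServerSettings.authOpenIDAuthorizationURL = 'https://sso.example.com/realms/abs/protocol/openid-connect/auth'
ServerSettings.authOpenIDTokenURL = 'https://sso.example.com/realms/abs/protocol/openid-connect/token'
ServerSettings.authOpenIDUserInfoURL = 'https://sso.example.com/realms/abs/protocol/openid-connect/userinfo'
ServerSettings.authOpenIDJwksURL = 'https://sso.example.com/realms/abs/protocol/openid-connect/certs'
ServerSettings.authOpenIDClientID = 'audiobookshelf'
ServerSettings.authOpenIDClientSecret = 'change-me'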

  /**
   * Unuse strategy
   *
   * @param {string} name
   */
  unuseAuthStrategy(name) {
    passport.unuse(name)
  }

  /**
   * Use strategy
   *
   * @param {string} name
   */
  useAuthStrategy(name) {
    if (name === 'openid') {
      this.initAuthStrategyOpenID()
    } else if (name === 'local') {
      this.initAuthStrategyPassword()
    } else {
      Logger.error('[Auth] Invalid auth strategy ' + name)
    }
  }

  /**
   * Stores the client's choice of how the login callback should happen in temporary cookies
   *
   * @param {import('express').Request} req
   * @param {import('express').Response} res
   */
  paramsToCookies(req, res) {
    // Set if the isRest flag is set or if the mobile oauth flow is used
    if (req.query.isRest?.toLowerCase() == 'true' || req.query.redirect_uri) {
      // store the isRest flag to the is_rest cookie
      res.cookie('is_rest', 'true', {
        maxAge: 120000, // 2 min
        httpOnly: true
      })
    } else {
      // no isRest flag set -> set is_rest cookie to false
      res.cookie('is_rest', 'false', {
        maxAge: 120000, // 2 min
        httpOnly: true
      })

      // persist state if passed in
      if (req.query.state) {
        res.cookie('auth_state', req.query.state, {
          maxAge: 120000, // 2 min
          httpOnly: true
        })
      }

      const callback = req.query.redirect_uri || req.query.callback

      // check if we are missing a callback parameter - we need one if isRest=false
      if (!callback) {
        res.status(400).send({
          message: 'No callback parameter'
        })
        return
      }
      // store the callback url to the auth_cb cookie
      res.cookie('auth_cb', callback, {
        maxAge: 120000, // 2 min
        httpOnly: true
      })
    }
  }
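
The two client modes this method distinguishes, shown as hypothetical login requests (the query parameter names are the ones read above; the host is a placeholder):

// REST/mobile client: the response will be JSON (is_rest cookie set to 'true')
//   GET /auth/openid?isRest=true
// Web client: the token is delivered via redirect to the stored callback (auth_cb cookie)
//   GET /auth/openid?callback=https://abs.example.com/login&state=xyz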

  /**
   * Informs the client in the right mode about a successful login and the token
   * (the client's choice is restored from cookies).
   *
   * @param {import('express').Request} req
   * @param {import('express').Response} res
   */
  async handleLoginSuccessBasedOnCookie(req, res) {
    // get userLogin json (information about the user, server and the session)
    const data_json = await this.getUserLoginResponsePayload(req.user)

    if (req.cookies.is_rest === 'true') {
      // REST request - send data
      res.json(data_json)
    } else {
      // UI request -> check if we have a callback url
      // TODO: do we want to somehow limit the values for auth_cb?
      if (req.cookies.auth_cb) {
        let stateQuery = req.cookies.auth_state ? `&state=${req.cookies.auth_state}` : ''
        // UI request -> redirect to auth_cb url and send the jwt token as parameter
        res.redirect(302, `${req.cookies.auth_cb}?setToken=${data_json.user.token}${stateQuery}`)
      } else {
        res.status(400).send('No callback or already expired')
      }
    }
  }
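
In the web (non-REST) branch this produces a redirect of the form https://abs.example.com/login?setToken=<jwt>&state=xyz, where the host and state value are illustrative; only setToken and the optional state parameter come from the code above.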

  /**
   * Creates all (express) routes required for authentication.
   *
   * @param {import('express').Router} router
   */
  async initAuthRoutes(router) {
    // Local strategy login route (takes username and password)
    router.post('/login', passport.authenticate('local'), async (req, res) => {
      // return the user login response json if the login was successful
      res.json(await this.getUserLoginResponsePayload(req.user))
    })

    // openid strategy login route (this redirects to the configured openid login provider)
    router.get('/auth/openid', (req, res, next) => {
      try {
        // helper function from openid-client
        function pick(object, ...paths) {
          const obj = {}
          for (const path of paths) {
            if (object[path] !== undefined) {
              obj[path] = object[path]
            }
          }
          return obj
        }

        // Get the OIDC client from the strategy
        // We need to call the client manually, because the strategy does not support forwarding the code challenge
        // for API or mobile clients
        const oidcStrategy = passport._strategy('openid-client')
        const protocol = (req.secure || req.get('x-forwarded-proto') === 'https') ? 'https' : 'http'

        let mobile_redirect_uri = null

        // The client wishes a different redirect_uri
        // We will allow it if it is in the whitelist, by saving it into this.openIdAuthSession and setting the redirect uri to /auth/openid/mobile-redirect
        // where we will handle the redirect to it
        if (req.query.redirect_uri) {
          // Check if the redirect_uri is in the whitelist
          if (Database.serverSettings.authOpenIDMobileRedirectURIs.includes(req.query.redirect_uri) ||
            (Database.serverSettings.authOpenIDMobileRedirectURIs.length === 1 && Database.serverSettings.authOpenIDMobileRedirectURIs[0] === '*')) {
            oidcStrategy._params.redirect_uri = new URL(`${protocol}://${req.get('host')}/auth/openid/mobile-redirect`).toString()
            mobile_redirect_uri = req.query.redirect_uri
          } else {
            Logger.debug(`[Auth] Invalid redirect_uri=${req.query.redirect_uri} - not in whitelist`)
            return res.status(400).send('Invalid redirect_uri')
          }
        } else {
          oidcStrategy._params.redirect_uri = new URL(`${protocol}://${req.get('host')}/auth/openid/callback`).toString()
        }

        Logger.debug(`[Auth] Oidc redirect_uri=${oidcStrategy._params.redirect_uri}`)
        const client = oidcStrategy._client
        const sessionKey = oidcStrategy._key

        let code_challenge
        let code_challenge_method

        // If code_challenge is provided, expect that code_verifier will be handled by the client (mobile app)
        // The web frontend of ABS does not need to do a PKCE itself, because it never handles the "code" of the oauth flow
        // and as such will not send a code challenge; we will then generate one
        if (req.query.code_challenge) {
          code_challenge = req.query.code_challenge
          code_challenge_method = req.query.code_challenge_method || 'S256'

          if (!['S256', 'plain'].includes(code_challenge_method)) {
            return res.status(400).send('Invalid code_challenge_method')
          }
        } else {
          // If no code_challenge is provided, assume a web application flow and generate one
          const code_verifier = OpenIDClient.generators.codeVerifier()
          code_challenge = OpenIDClient.generators.codeChallenge(code_verifier)
          code_challenge_method = 'S256'

          // Store the code_verifier in the session for later use in the token exchange
          req.session[sessionKey] = { ...req.session[sessionKey], code_verifier }
        }

        const params = {
          state: OpenIDClient.generators.random(),
          // Other params by the passport strategy
          ...oidcStrategy._params
        }

        if (!params.nonce && params.response_type.includes('id_token')) {
          params.nonce = OpenIDClient.generators.random()
        }

        req.session[sessionKey] = {
          ...req.session[sessionKey],
          ...pick(params, 'nonce', 'state', 'max_age', 'response_type'),
          mobile: req.query.redirect_uri, // Used in the abs callback later, set mobile if redirect_uri is filled out
          sso_redirect_uri: oidcStrategy._params.redirect_uri // Save the redirect_uri (for the SSO Provider) for the callback
        }

        // We cannot save redirect_uri in the session, because the mobile client uses the browser instead of the API
        // for the request to mobile-redirect, and as such the session is not shared
        this.openIdAuthSession.set(params.state, { mobile_redirect_uri: mobile_redirect_uri })

        // Now get the URL to direct to
        const authorizationUrl = client.authorizationUrl({
          ...params,
          scope: 'openid profile email',
          response_type: 'code',
          code_challenge,
          code_challenge_method
        })

        // params (isRest, callback) to a cookie that will be sent to the client
        this.paramsToCookies(req, res)

        // Redirect the user agent (browser) to the authorization URL
        res.redirect(authorizationUrl)
      } catch (error) {
        Logger.error(`[Auth] Error in /auth/openid route: ${error}`)
        res.status(500).send('Internal Server Error')
      }
    })
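
A mobile client driving this route performs its own PKCE instead of letting the server generate one. A sketch of the client side, using the same openid-client generators this file already imports (the redirect_uri value is illustrative):

const { generators } = require('openid-client')
const code_verifier = generators.codeVerifier() // random secret, kept on the device
const code_challenge = generators.codeChallenge(code_verifier) // S256 hash, sent to the server
// GET /auth/openid?redirect_uri=audiobookshelf://oauth&code_challenge=<code_challenge>
// The device later presents code_verifier to /auth/openid/callback for the token exchange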

    // This will be the oauth2 callback route for mobile clients
    // It will redirect to an app-link like audiobookshelf://oauth
    router.get('/auth/openid/mobile-redirect', (req, res) => {
      try {
        // Extract the state parameter from the request
        const { state, code } = req.query

        // Check if the state provided is in our list
        if (!state || !this.openIdAuthSession.has(state)) {
          Logger.error('[Auth] /auth/openid/mobile-redirect route: State parameter mismatch')
          return res.status(400).send('State parameter mismatch')
        }

        let mobile_redirect_uri = this.openIdAuthSession.get(state).mobile_redirect_uri

        if (!mobile_redirect_uri) {
          Logger.error('[Auth] No redirect URI')
          return res.status(400).send('No redirect URI')
        }

        this.openIdAuthSession.delete(state)

        const redirectUri = `${mobile_redirect_uri}?code=${encodeURIComponent(code)}&state=${encodeURIComponent(state)}`
        // Redirect to the overwrite URI saved in the map
        res.redirect(redirectUri)
      } catch (error) {
        Logger.error(`[Auth] Error in /auth/openid/mobile-redirect route: ${error}`)
        res.status(500).send('Internal Server Error')
      }
    })
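
On success this route hands the authorization code back to the app link mentioned above, e.g. audiobookshelf://oauth?code=<authorization code>&state=<state>, with both values URL-encoded as in the code.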

    // openid strategy callback route (this receives the token from the configured openid login provider)
    router.get('/auth/openid/callback', (req, res, next) => {
      const oidcStrategy = passport._strategy('openid-client')
      const sessionKey = oidcStrategy._key

      if (!req.session[sessionKey]) {
        return res.status(400).send('No session')
      }

      // If the client sends us a code_verifier, we will tell passport to send it in the token request
      // The code_verifier will be validated by the oauth2 provider by comparing it to the code_challenge in the first request
      // Crucial for API/Mobile clients
      if (req.query.code_verifier) {
        req.session[sessionKey].code_verifier = req.query.code_verifier
      }

      function handleAuthError(isMobile, errorCode, errorMessage, logMessage, response) {
        Logger.error(logMessage)
        if (response) {
          // Depending on the error, it can also have a body
          // We also log the request header the passport plugin sends for the URL
          const header = response.req?._header.replace(/Authorization: [^\r\n]*/i, 'Authorization: REDACTED')
          Logger.debug(header + '\n' + response.body?.toString())
        }

        if (isMobile) {
          return res.status(errorCode).send(errorMessage)
        } else {
          return res.redirect(`/login?error=${encodeURIComponent(errorMessage)}&autoLaunch=0`)
        }
      }

      function passportCallback(req, res, next) {
        return (err, user, info) => {
          const isMobile = req.session[sessionKey]?.mobile === true
          if (err) {
            return handleAuthError(isMobile, 500, 'Error in callback', `[Auth] Error in openid callback - ${err}`, err?.response)
          }

          if (!user) {
            // Info usually contains the error message from the SSO provider
            return handleAuthError(isMobile, 401, 'Unauthorized', `[Auth] No data in openid callback - ${info}`, info?.response)
          }

          req.logIn(user, (loginError) => {
            if (loginError) {
              return handleAuthError(isMobile, 500, 'Error during login', `[Auth] Error in openid callback: ${loginError}`)
            }
            next()
          })
        }
      }

      // While not required by the standard, the passport plugin re-sends the original redirect_uri in the token request
      // We need to set it correctly, as some SSO providers (e.g. keycloak) check that parameter when it is provided
      // We set it here again because the passport param can change between requests
      return passport.authenticate('openid-client', { redirect_uri: req.session[sessionKey].sso_redirect_uri }, passportCallback(req, res, next))(req, res, next)
    },
      // on a successful login: read the cookies and react like the client requested (callback or json)
      this.handleLoginSuccessBasedOnCookie.bind(this))

    /**
     * Helper route used to auto-populate the openid URLs in config/authentication
     * Takes an issuer URL as a query param and requests the config data at "/.well-known/openid-configuration"
     *
     * @example /auth/openid/config?issuer=http://192.168.1.66:9000/application/o/audiobookshelf/
     */
    router.get('/auth/openid/config', this.isAuthenticated, async (req, res) => {
      if (!req.user.isAdminOrUp) {
        Logger.error(`[Auth] Non-admin user "${req.user.username}" attempted to get issuer config`)
        return res.sendStatus(403)
      }

      if (!req.query.issuer) {
        return res.status(400).send('Invalid request. Query param \'issuer\' is required')
      }

      // Strip trailing slash
      let issuerUrl = req.query.issuer
      if (issuerUrl.endsWith('/')) issuerUrl = issuerUrl.slice(0, -1)

      // Append config pathname and validate URL
      let configUrl = null
      try {
        configUrl = new URL(`${issuerUrl}/.well-known/openid-configuration`)
        if (!configUrl.pathname.endsWith('/.well-known/openid-configuration')) {
          throw new Error('Invalid pathname')
        }
      } catch (error) {
        Logger.error(`[Auth] Failed to get openid configuration. Invalid URL "${configUrl}"`, error)
        return res.status(400).send('Invalid request. Query param \'issuer\' is invalid')
      }

      axios.get(configUrl.toString()).then(({ data }) => {
        res.json({
          issuer: data.issuer,
          authorization_endpoint: data.authorization_endpoint,
          token_endpoint: data.token_endpoint,
          userinfo_endpoint: data.userinfo_endpoint,
          end_session_endpoint: data.end_session_endpoint,
          jwks_uri: data.jwks_uri
        })
      }).catch((error) => {
        Logger.error(`[Auth] Failed to get openid configuration at "${configUrl}"`, error)
        res.status(error.statusCode || 400).send(`${error.code || 'UNKNOWN'}: Failed to get openid configuration`)
      })
    })
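
Usage sketch for this helper, reusing the example issuer from the docblock (an admin JWT is required since the route is behind isAuthenticated; the host is a placeholder):

// curl -H "Authorization: Bearer <admin token>" \
//   "http://abs.example.com/auth/openid/config?issuer=http://192.168.1.66:9000/application/o/audiobookshelf/"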

    // Logout route
    router.post('/logout', (req, res) => {
      // TODO: invalidate possible JWTs
      req.logout((err) => {
        if (err) {
          res.sendStatus(500)
        } else {
          res.sendStatus(200)
        }
      })
    })
  }

  /**
   * Middleware to use in express to only allow authenticated users.
   * @param {import('express').Request} req
   * @param {import('express').Response} res
   * @param {import('express').NextFunction} next
   */
  isAuthenticated(req, res, next) {
    // check if the session cookie says that we are authenticated
    if (req.isAuthenticated()) {
      next()
    } else {
      // try JWT to authenticate
      passport.authenticate("jwt")(req, res, next)
    }
  }

  /**
   * Function to generate a jwt token for a given user
   *
   * @param {{ id:string, username:string }} user
   * @returns {string} token
   */
  generateAccessToken(user) {
    return jwt.sign({ userId: user.id, username: user.username }, global.ServerSettings.tokenSecret)
  }

  /**
   * Function to validate a jwt token for a given user
   *
   * @param {string} token
   * @returns {Object} the token's data
   */
  static validateAccessToken(token) {
    try {
      return jwt.verify(token, global.ServerSettings.tokenSecret)
    }
    catch (err) {
      return null
    }
  }
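
generateAccessToken and validateAccessToken are inverses over the same tokenSecret; note the tokens are signed with no expiry. A minimal round trip (values illustrative):

const auth = new Auth()
const token = auth.generateAccessToken({ id: 'user-uuid', username: 'alice' })
const payload = Auth.validateAccessToken(token)
// payload => { userId: 'user-uuid', username: 'alice', iat: <issued-at> },
// or null if the signature does not match the current tokenSecret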

  /**
   * Generate a token secret which is used to sign/protect the jwts.
   */
  async initTokenSecret() {
    if (process.env.TOKEN_SECRET) { // User can supply their own token secret
      Logger.debug(`[Auth] Setting token secret - using user passed in TOKEN_SECRET env var`)
      Database.serverSettings.tokenSecret = process.env.TOKEN_SECRET
    } else {
      Logger.debug(`[Auth] Setting token secret - using random bytes`)
      Database.serverSettings.tokenSecret = require('crypto').randomBytes(256).toString('base64')
    }
    await Database.updateServerSettings()
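
To pin the secret across restarts, set the environment variable before starting the server however you normally launch it (a sketch; any sufficiently random string works):

// TOKEN_SECRET="$(openssl rand -base64 48)" node index.js
// or with docker: -e TOKEN_SECRET=...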

@@ -35,47 +584,83 @@ class Auth {
    const users = await Database.userModel.getOldUsers()
    if (users.length) {
      for (const user of users) {
        user.token = await this.generateAccessToken({ userId: user.id, username: user.username })
        Logger.warn(`[Auth] User ${user.username} api token has been updated using new token secret`)
        user.token = await this.generateAccessToken(user)
      }
      await Database.updateBulkUsers(users)
    }
  }

  async authMiddleware(req, res, next) {
    var token = null
  /**
   * Checks if the user in the validated jwt_payload really exists and is active.
   * @param {Object} jwt_payload
   * @param {function} done
   */
  async jwtAuthCheck(jwt_payload, done) {
    // load user by id from the jwt token
    const user = await Database.userModel.getUserByIdOrOldId(jwt_payload.userId)

    // If using a get request, the token can be passed as a query string
    if (req.method === 'GET' && req.query && req.query.token) {
      token = req.query.token
    } else {
      const authHeader = req.headers['authorization']
      token = authHeader && authHeader.split(' ')[1]
    if (!user?.isActive) {
      // deny login
      done(null, null)
      return
    }

    if (token == null) {
      Logger.error('Api called without a token', req.path)
      return res.sendStatus(401)
    }

    const user = await this.verifyToken(token)
    if (!user) {
      Logger.error('Verify Token User Not Found', token)
      return res.sendStatus(404)
    }
    if (!user.isActive) {
      Logger.error('Verify Token User is disabled', token, user.username)
      return res.sendStatus(403)
    }
    req.user = user
    next()
    // approve login
    done(null, user)
    return
  }

  /**
   * Checks if a username and password tuple is valid and the user active.
   * @param {string} username
   * @param {string} password
   * @param {function} done
   */
  async localAuthCheckUserPw(username, password, done) {
    // Load the user given its username
    const user = await Database.userModel.getUserByUsername(username.toLowerCase())

    if (!user?.isActive) {
      done(null, null)
      return
    }

    // Check passwordless root user
    if (user.type === 'root' && !user.pash) {
      if (password) {
        // deny login
        done(null, null)
        return
      }
      // approve login
      done(null, user)
      return
    } else if (!user.pash) {
      Logger.error(`[Auth] User "${user.username}"/"${user.type}" attempted to login without a password set`)
      done(null, null)
      return
    }

    // Check password match
    const compare = await bcrypt.compare(password, user.pash)
    if (compare) {
      // approve login
      done(null, user)
      return
    }
    // deny login
    done(null, null)
    return
  }

  /**
   * Hashes a password with bcrypt.
   * @param {string} password
   * @returns {string} hash
   */
  hashPass(password) {
    return new Promise((resolve) => {
      bcrypt.hash(password, 8, (err, hash) => {
        if (err) {
          Logger.error('Hash failed', err)
          resolve(null)
        } else {
          resolve(hash)

@@ -84,36 +669,11 @@ class Auth {
    })
  }

  generateAccessToken(payload) {
    return jwt.sign(payload, Database.serverSettings.tokenSecret)
  }

  authenticateUser(token) {
    return this.verifyToken(token)
  }

  verifyToken(token) {
    return new Promise((resolve) => {
      jwt.verify(token, Database.serverSettings.tokenSecret, async (err, payload) => {
        if (!payload || err) {
          Logger.error('JWT Verify Token Failed', err)
          return resolve(null)
        }

        const user = await Database.userModel.getUserByIdOrOldId(payload.userId)
        if (user && user.username === payload.username) {
          resolve(user)
        } else {
          resolve(null)
        }
      })
    })
  }

  /**
   * Payload returned to a user after successful login
   * @param {oldUser} user
   * @returns {object}
   * Return the login info payload for a user
   *
   * @param {Object} user
   * @returns {Promise<Object>} jsonPayload
   */
  async getUserLoginResponsePayload(user) {
    const libraryIds = await Database.libraryModel.getAllLibraryIds()

@@ -126,59 +686,28 @@ class Auth {
    }
  }

  async login(req, res) {
    const ipAddress = requestIp.getClientIp(req)
    const username = (req.body.username || '').toLowerCase()
    const password = req.body.password || ''

    const user = await Database.userModel.getUserByUsername(username)

    if (!user?.isActive) {
      Logger.warn(`[Auth] Failed login attempt ${req.rateLimit.current} of ${req.rateLimit.limit} from ${ipAddress}`)
      if (req.rateLimit.remaining <= 2) {
        Logger.error(`[Auth] Failed login attempt for username ${username} from ip ${ipAddress}. Attempts: ${req.rateLimit.current}`)
        return res.status(401).send(`Invalid user or password (${req.rateLimit.remaining === 0 ? '1 attempt remaining' : `${req.rateLimit.remaining + 1} attempts remaining`})`)
      }
      return res.status(401).send('Invalid user or password')
    }

    // Check passwordless root user
    if (user.type === 'root' && (!user.pash || user.pash === '')) {
      if (password) {
        return res.status(401).send('Invalid root password (hint: there is none)')
      } else {
        Logger.info(`[Auth] ${user.username} logged in from ${ipAddress}`)
        const userLoginResponsePayload = await this.getUserLoginResponsePayload(user)
        return res.json(userLoginResponsePayload)
      }
    }

    // Check password match
    const compare = await bcrypt.compare(password, user.pash)
    if (compare) {
      Logger.info(`[Auth] ${user.username} logged in from ${ipAddress}`)
      const userLoginResponsePayload = await this.getUserLoginResponsePayload(user)
      res.json(userLoginResponsePayload)
    } else {
      Logger.warn(`[Auth] Failed login attempt ${req.rateLimit.current} of ${req.rateLimit.limit} from ${ipAddress}`)
      if (req.rateLimit.remaining <= 2) {
        Logger.error(`[Auth] Failed login attempt for user ${user.username} from ip ${ipAddress}. Attempts: ${req.rateLimit.current}`)
        return res.status(401).send(`Invalid user or password (${req.rateLimit.remaining === 0 ? '1 attempt remaining' : `${req.rateLimit.remaining + 1} attempts remaining`})`)
      }
      return res.status(401).send('Invalid user or password')
    }
  }

  /**
   *
   * @param {string} password
   * @param {*} user
   * @returns {boolean}
   */
  comparePassword(password, user) {
    if (user.type === 'root' && !password && !user.pash) return true
    if (!password || !user.pash) return false
    return bcrypt.compare(password, user.pash)
  }

  /**
   * User changes their password from request
   *
   * @param {import('express').Request} req
   * @param {import('express').Response} res
   */
  async userChangePassword(req, res) {
    var { password, newPassword } = req.body
    let { password, newPassword } = req.body
    newPassword = newPassword || ''
    const matchingUser = await Database.userModel.getUserById(req.user.id)
    const matchingUser = req.user

    // Only root can have an empty password
    if (matchingUser.type !== 'root' && !newPassword) {

@@ -187,6 +716,7 @@ class Auth {
      })
    }

    // Check password match
    const compare = await this.comparePassword(password, matchingUser)
    if (!compare) {
      return res.json({

@@ -208,6 +738,7 @@ class Auth {

    const success = await Database.updateUser(matchingUser)
    if (success) {
      Logger.info(`[Auth] User "${matchingUser.username}" changed password`)
      res.json({
        success: true
      })

@@ -218,4 +749,5 @@ class Auth {
    }
  }
}

module.exports = Auth

server/Database.js

@@ -1,5 +1,5 @@
const Path = require('path')
const { Sequelize } = require('sequelize')
const { Sequelize, Op } = require('sequelize')

const packageJson = require('../package.json')
const fs = require('./libs/fsExtra')

@@ -122,11 +122,16 @@ class Database {
    return this.models.feed
  }

  /** @type {typeof import('./models/Feed')} */
  /** @type {typeof import('./models/FeedEpisode')} */
  get feedEpisodeModel() {
    return this.models.feedEpisode
  }

  /** @type {typeof import('./models/PlaybackSession')} */
  get playbackSessionModel() {
    return this.models.playbackSession
  }

  /**
   * Check if db file exists
   * @returns {boolean}

@@ -172,11 +177,11 @@ class Database {
    if (process.env.QUERY_LOGGING === "log") {
      // Setting QUERY_LOGGING=log will log all Sequelize queries before they run
      Logger.info(`[Database] Query logging enabled`)
      logging = (query) => Logger.dev(`Running the following query:\n ${query}`)
      logging = (query) => Logger.debug(`Running the following query:\n ${query}`)
    } else if (process.env.QUERY_LOGGING === "benchmark") {
      // Setting QUERY_LOGGING=benchmark will log all Sequelize queries and their execution times, after they run
      Logger.info(`[Database] Query benchmarking enabled`)
      logging = (query, time) => Logger.dev(`Ran the following query in ${time}ms:\n ${query}`)
      logging = (query, time) => Logger.debug(`Ran the following query in ${time}ms:\n ${query}`)
      benchmark = true
    }

@@ -276,11 +281,17 @@ class Database {
    global.ServerSettings = this.serverSettings.toJSON()

    // Version specific migrations
    if (this.serverSettings.version === '2.3.0' && this.compareVersions(packageJson.version, '2.3.0') == 1) {
      await dbMigration.migrationPatch(this)
    if (packageJson.version !== this.serverSettings.version) {
      if (this.serverSettings.version === '2.3.0' && this.compareVersions(packageJson.version, '2.3.0') == 1) {
        await dbMigration.migrationPatch(this)
      }
      if (['2.3.0', '2.3.1', '2.3.2', '2.3.3'].includes(this.serverSettings.version) && this.compareVersions(packageJson.version, '2.3.3') >= 0) {
        await dbMigration.migrationPatch2(this)
      }
    }
    if (['2.3.0', '2.3.1', '2.3.2', '2.3.3'].includes(this.serverSettings.version) && this.compareVersions(packageJson.version, '2.3.3') >= 0) {
      await dbMigration.migrationPatch2(this)
    // Build migrations
    if (this.serverSettings.buildNumber <= 0) {
      await require('./utils/migrations/absMetadataMigration').migrate(this)
    }

    await this.cleanDatabase()

@@ -288,9 +299,19 @@ class Database {
    // Set if root user has been created
    this.hasRootUser = await this.models.user.getHasRootUser()

    // Update server settings with version/build
    let updateServerSettings = false
    if (packageJson.version !== this.serverSettings.version) {
      Logger.info(`[Database] Server upgrade detected from ${this.serverSettings.version} to ${packageJson.version}`)
      this.serverSettings.version = packageJson.version
      this.serverSettings.buildNumber = packageJson.buildNumber
      updateServerSettings = true
    } else if (packageJson.buildNumber !== this.serverSettings.buildNumber) {
      Logger.info(`[Database] Server v${packageJson.version} build upgraded from ${this.serverSettings.buildNumber} to ${packageJson.buildNumber}`)
      this.serverSettings.buildNumber = packageJson.buildNumber
      updateServerSettings = true
    }
    if (updateServerSettings) {
      await this.updateServerSettings()
    }
  }

@@ -677,6 +698,7 @@ class Database {
   * Clean invalid records in database
   * Series should have at least one Book
   * Book and Podcast must have an associated LibraryItem
   * Remove playback sessions that are 3 seconds or less
   */
  async cleanDatabase() {
    // Remove invalid Podcast records

@@ -717,6 +739,18 @@ class Database {
      Logger.warn(`Found series "${series.name}" with no books - removing it`)
      await series.destroy()
    }

    // Remove playback sessions that were 3 seconds or less
    const badSessionsRemoved = await this.playbackSessionModel.destroy({
      where: {
        timeListening: {
          [Op.lte]: 3
        }
      }
    })
    if (badSessionsRemoved > 0) {
      Logger.warn(`Removed ${badSessionsRemoved} sessions that were 3 seconds or less`)
    }
  }
}

server/Logger.js

@@ -11,7 +11,7 @@ class Logger {
  }

  get timestamp() {
    return date.format(new Date(), 'YYYY-MM-DD HH:mm:ss')
    return date.format(new Date(), 'YYYY-MM-DD HH:mm:ss.SSS')
  }

  get levelString() {

@@ -87,15 +87,6 @@ class Logger {
    this.debug(`Set Log Level to ${this.levelString}`)
  }

  /**
   * Only to console and only for development
   * @param {...any} args
   */
  dev(...args) {
    if (!this.isDev || process.env.HIDE_DEV_LOGS === '1') return
    console.log(`[${this.timestamp}] DEV:`, ...args)
  }

  trace(...args) {
    if (this.logLevel > LogLevel.TRACE) return
    console.trace(`[${this.timestamp}] TRACE:`, ...args)

server/Server.js | 117

@@ -5,6 +5,7 @@ const http = require('http')
const fs = require('./libs/fsExtra')
const fileUpload = require('./libs/expressFileupload')
const rateLimit = require('./libs/expressRateLimit')
const cookieParser = require("cookie-parser")

const { version } = require('../package.json')

@@ -31,9 +32,14 @@ const PodcastManager = require('./managers/PodcastManager')
const AudioMetadataMangaer = require('./managers/AudioMetadataManager')
const RssFeedManager = require('./managers/RssFeedManager')
const CronManager = require('./managers/CronManager')
const TaskManager = require('./managers/TaskManager')
const ApiCacheManager = require('./managers/ApiCacheManager')
const BinaryManager = require('./managers/BinaryManager')
const LibraryScanner = require('./scanner/LibraryScanner')

// Import the main Passport and express-session libraries
const passport = require('passport')
const expressSession = require('express-session')

class Server {
  constructor(SOURCE, PORT, HOST, UID, GID, CONFIG_PATH, METADATA_PATH, ROUTER_BASE_PATH) {
    this.Port = PORT

@@ -58,17 +64,18 @@ class Server {
    this.auth = new Auth()

    // Managers
    this.taskManager = new TaskManager()
    this.notificationManager = new NotificationManager()
    this.emailManager = new EmailManager()
    this.backupManager = new BackupManager()
    this.logManager = new LogManager()
    this.abMergeManager = new AbMergeManager(this.taskManager)
    this.abMergeManager = new AbMergeManager()
    this.playbackSessionManager = new PlaybackSessionManager()
    this.podcastManager = new PodcastManager(this.watcher, this.notificationManager, this.taskManager)
    this.audioMetadataManager = new AudioMetadataMangaer(this.taskManager)
    this.podcastManager = new PodcastManager(this.watcher, this.notificationManager)
    this.audioMetadataManager = new AudioMetadataMangaer()
    this.rssFeedManager = new RssFeedManager()
    this.cronManager = new CronManager(this.podcastManager)
    this.apiCacheManager = new ApiCacheManager()
    this.binaryManager = new BinaryManager()

    // Routers
    this.apiRouter = new ApiRouter(this)

@@ -81,17 +88,14 @@ class Server {
  }

  authMiddleware(req, res, next) {
    this.auth.authMiddleware(req, res, next)
    // ask passportjs if the current request is authenticated
    this.auth.isAuthenticated(req, res, next)
  }

  cancelLibraryScan(libraryId) {
    LibraryScanner.setCancelLibraryScan(libraryId)
  }

  getLibrariesScanning() {
    return LibraryScanner.librariesScanning
  }

  /**
   * Initialize database, backups, logs, rss feeds, cron jobs & watcher
   * Cleanup stale/invalid data

@@ -116,6 +120,12 @@ class Server {

    const libraries = await Database.libraryModel.getAllOldLibraries()
    await this.cronManager.init(libraries)
    this.apiCacheManager.init()

    // Download ffmpeg & ffprobe if not found (Currently only in use for Windows installs)
    if (global.isWin || Logger.isDev) {
      await this.binaryManager.init()
    }

    if (Database.serverSettings.scannerDisableWatcher) {
      Logger.info(`[Server] Watcher is disabled`)

@@ -130,20 +140,66 @@ class Server {
    await this.init()

    const app = express()

    /**
     * @temporary
     * This is necessary for the ebook & cover API endpoint in the mobile apps
     * The mobile app ereader is using the fetch api in Capacitor, which is currently difficult to switch to native requests,
     * so we have to allow cors for specific origins to the /api/items/:id/ebook endpoint
     * The cover image is fetched with XMLHttpRequest in the mobile apps to load into a canvas and extract colors
     * @see https://ionicframework.com/docs/troubleshooting/cors
     *
     * Running in development allows cors to allow testing the mobile apps in the browser
     */
    app.use((req, res, next) => {
      if (Logger.isDev || req.path.match(/\/api\/items\/([a-z0-9-]{36})\/(ebook|cover)(\/[0-9]+)?/)) {
        const allowedOrigins = ['capacitor://localhost', 'http://localhost']
        if (Logger.isDev || allowedOrigins.some(o => o === req.get('origin'))) {
          res.header('Access-Control-Allow-Origin', req.get('origin'))
          res.header("Access-Control-Allow-Methods", 'GET, POST, PATCH, PUT, DELETE, OPTIONS')
          res.header('Access-Control-Allow-Headers', '*')
          res.header('Access-Control-Allow-Credentials', true)
          if (req.method === 'OPTIONS') {
            return res.sendStatus(200)
          }
        }
      }

      next()
    })

    // parse cookies in requests
    app.use(cookieParser())
    // enable express-session
    app.use(expressSession({
      secret: global.ServerSettings.tokenSecret,
      resave: false,
      saveUninitialized: false,
      cookie: {
        // also send the cookie if we are not on https (not every user has https)
        secure: false
      },
    }))
    // init passport.js
    app.use(passport.initialize())
    // register passport in express-session
    app.use(passport.session())
    // config passport.js
    await this.auth.initPassportJs()

    const router = express.Router()
    app.use(global.RouterBasePath, router)
    app.disable('x-powered-by')

    this.server = http.createServer(app)

    router.use(this.auth.cors)
    router.use(fileUpload({
      defCharset: 'utf8',
      defParamCharset: 'utf8',
      useTempFiles: true,
      tempFileDir: Path.join(global.MetadataPath, 'tmp')
    }))
    router.use(express.urlencoded({ extended: true, limit: "5mb" }));
    router.use(express.urlencoded({ extended: true, limit: "5mb" }))
    router.use(express.json({ limit: "5mb" }))

    // Static path to generated nuxt

@@ -153,7 +209,6 @@ class Server {
    // Static folder
    router.use(express.static(Path.join(global.appRoot, 'static')))

    // router.use('/api/v1', routes) // TODO: New routes
    router.use('/api', this.authMiddleware.bind(this), this.apiRouter.router)
    router.use('/hls', this.authMiddleware.bind(this), this.hlsRouter.router)

@@ -162,7 +217,7 @@ class Server {
      Logger.info(`[Server] Requesting rss feed ${req.params.slug}`)
      this.rssFeedManager.getFeed(req, res)
    })
    router.get('/feed/:slug/cover', (req, res) => {
    router.get('/feed/:slug/cover*', (req, res) => {
      this.rssFeedManager.getFeedCover(req, res)
    })
    router.get('/feed/:slug/item/:episodeId/*', (req, res) => {

@@ -170,6 +225,9 @@ class Server {
      this.rssFeedManager.getFeedItem(req, res)
    })

    // Auth routes
    await this.auth.initAuthRoutes(router)

    // Client dynamic routes
    const dyanimicRoutes = [
      '/item/:id',

@@ -181,6 +239,7 @@ class Server {
      '/library/:library/search',
      '/library/:library/bookshelf/:id?',
      '/library/:library/authors',
      '/library/:library/narrators',
      '/library/:library/series/:id?',
      '/library/:library/podcast/search',
      '/library/:library/podcast/latest',

@@ -193,8 +252,8 @@ class Server {
    ]
    dyanimicRoutes.forEach((route) => router.get(route, (req, res) => res.sendFile(Path.join(distPath, 'index.html'))))

    router.post('/login', this.getLoginRateLimiter(), (req, res) => this.auth.login(req, res))
    router.post('/logout', this.authMiddleware.bind(this), this.logout.bind(this))
    // router.post('/login', passport.authenticate('local', this.auth.login), this.auth.loginResult.bind(this))
    // router.post('/logout', this.authMiddleware.bind(this), this.logout.bind(this))
    router.post('/init', (req, res) => {
      if (Database.hasRootUser) {
        Logger.error(`[Server] attempt to init server when server already has a root user`)

@@ -206,8 +265,12 @@ class Server {
      // status check for client to see if server has been initialized
      // server has been initialized if a root user exists
      const payload = {
        app: 'audiobookshelf',
        serverVersion: version,
        isInit: Database.hasRootUser,
        language: Database.serverSettings.language
        language: Database.serverSettings.language,
        authMethods: Database.serverSettings.authActiveAuthMethods,
        authFormData: Database.serverSettings.authFormData
      }
      if (!payload.isInit) {
        payload.ConfigPath = global.ConfigPath

@@ -221,6 +284,19 @@ class Server {
    })
    app.get('/healthcheck', (req, res) => res.sendStatus(200))

    let sigintAlreadyReceived = false
    process.on('SIGINT', async () => {
      if (!sigintAlreadyReceived) {
        sigintAlreadyReceived = true
        Logger.info('SIGINT (Ctrl+C) received. Shutting down...')
        await this.stop()
        Logger.info('Server stopped. Exiting.')
      } else {
        Logger.info('SIGINT (Ctrl+C) received again. Exiting immediately.')
      }
      process.exit(0)
    })

    this.server.listen(this.Port, this.Host, () => {
      if (this.Host) Logger.info(`Listening on http://${this.Host}:${this.Port}`)
      else Logger.info(`Listening on port :${this.Port}`)

@@ -327,12 +403,17 @@ class Server {
    res.sendStatus(200)
  }

  /**
   * Gracefully stop server
   * Stops watcher and socket server
   */
  async stop() {
    Logger.info('=== Stopping Server ===')
    await this.watcher.close()
    Logger.info('Watcher Closed')

    return new Promise((resolve) => {
      this.server.close((err) => {
      SocketAuthority.close((err) => {
        if (err) {
          Logger.error('Failed to close server', err)
        } else {

server/SocketAuthority.js

@@ -1,6 +1,7 @@
const SocketIO = require('socket.io')
const Logger = require('./Logger')
const Database = require('./Database')
const Auth = require('./Auth')

class SocketAuthority {
  constructor() {

@@ -72,6 +73,20 @@ class SocketAuthority {
    }
  }

  /**
   * Closes the Socket.IO server and disconnects all clients
   *
   * @param {Function} callback
   */
  close(callback) {
    Logger.info('[SocketAuthority] Shutting down')
    // This will close all open socket connections, and also close the underlying http server
    if (this.io)
      this.io.close(callback)
    else
      callback()
  }

  initialize(Server) {
    this.Server = Server

@@ -81,6 +96,7 @@ class SocketAuthority {
        methods: ["GET", "POST"]
      }
    })

    this.io.on('connection', (socket) => {
      this.clients[socket.id] = {
        id: socket.id,

@@ -144,14 +160,31 @@ class SocketAuthority {
    })
  }

  // When setting up a socket connection the user needs to be associated with a socket id
  // for this the client will send an 'auth' event that includes the user's API token
  /**
   * When setting up a socket connection the user needs to be associated with a socket id
   * for this the client will send an 'auth' event that includes the user's API token
   *
   * @param {SocketIO.Socket} socket
   * @param {string} token JWT
   */
  async authenticateSocket(socket, token) {
    const user = await this.Server.auth.authenticateUser(token)
    if (!user) {
    // we don't use passport to authenticate the jwt we get over the socket connection.
    // it's easier to directly verify/decode it.
    const token_data = Auth.validateAccessToken(token)

    if (!token_data?.userId) {
      // Token invalid
      Logger.error('Cannot validate socket - invalid token')
      return socket.emit('invalid_token')
    }
    // get the user via the id from the decoded jwt.
    const user = await Database.userModel.getUserByIdOrOldId(token_data.userId)
    if (!user) {
      // user not found
      Logger.error('Cannot validate socket - invalid token')
      return socket.emit('invalid_token')
    }

    const client = this.clients[socket.id]
    if (!client) {
      Logger.error(`[SocketAuthority] Socket for user ${user.username} has no client`)
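
A minimal client-side sketch of the handshake this method serves, assuming the socket.io-client package (the server URL is a placeholder):

const { io } = require('socket.io-client')
const socket = io('https://abs.example.com')
// associate this socket with a user by sending the JWT in an 'auth' event
socket.on('connect', () => socket.emit('auth', token))
socket.on('invalid_token', () => console.error('Socket token rejected'))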

@@ -173,14 +206,13 @@ class SocketAuthority {

    this.adminEmitter('user_online', client.user.toJSONForPublic(this.Server.playbackSessionManager.sessions))

    // Update user lastSeen
    // Update user lastSeen without firing sequelize bulk update hooks
    user.lastSeen = Date.now()
    await Database.updateUser(user)
    await Database.userModel.updateFromOld(user, false)

    const initialPayload = {
      userId: client.user.id,
      username: client.user.username,
      librariesScanning: this.Server.getLibrariesScanning()
      username: client.user.username
    }
    if (user.isAdminOrUp) {
      initialPayload.usersOnline = this.getUsersOnline()

server/Watcher.js

@@ -3,8 +3,10 @@ const EventEmitter = require('events')
const Watcher = require('./libs/watcher/watcher')
const Logger = require('./Logger')
const LibraryScanner = require('./scanner/LibraryScanner')
const Task = require('./objects/Task')
const TaskManager = require('./managers/TaskManager')

const { filePathToPOSIX } = require('./utils/fileUtils')
const { filePathToPOSIX, isSameOrSubPath, getFileMTimeMs } = require('./utils/fileUtils')

/**
 * @typedef PendingFileUpdate

@@ -22,7 +24,12 @@ class FolderWatcher extends EventEmitter {
    /** @type {PendingFileUpdate[]} */
    this.pendingFileUpdates = []
    this.pendingDelay = 4000
    /** @type {NodeJS.Timeout} */
    this.pendingTimeout = null
    /** @type {Task} */
    this.pendingTask = null

    this.filesBeingAdded = new Set()

    /** @type {string[]} */
    this.ignoreDirs = []

@@ -59,14 +66,13 @@ class FolderWatcher extends EventEmitter {
    })
    watcher
      .on('add', (path) => {
        this.onNewFile(library.id, path)
        this.onFileAdded(library.id, filePathToPOSIX(path))
      }).on('change', (path) => {
        // This is triggered from metadata changes, not what we want
        // this.onFileUpdated(path)
      }).on('unlink', path => {
        this.onFileRemoved(library.id, path)
        this.onFileRemoved(library.id, filePathToPOSIX(path))
      }).on('rename', (path, pathNext) => {
        this.onRename(library.id, path, pathNext)
        this.onFileRename(library.id, filePathToPOSIX(path), filePathToPOSIX(pathNext))
      }).on('error', (error) => {
        Logger.error(`[Watcher] ${error}`)
      }).on('ready', () => {

@@ -132,14 +138,31 @@ class FolderWatcher extends EventEmitter {
    return this.libraryWatchers.map(lib => lib.watcher.close())
  }

  onNewFile(libraryId, path) {
  /**
   * Watcher detected file added
   *
   * @param {string} libraryId
   * @param {string} path
   */
  onFileAdded(libraryId, path) {
    if (this.checkShouldIgnorePath(path)) {
      return
    }
    Logger.debug('[Watcher] File Added', path)
    this.addFileUpdate(libraryId, path, 'added')

    if (!this.filesBeingAdded.has(path)) {
      this.filesBeingAdded.add(path)
      this.waitForFileToAdd(path)
    }
  }

  /**
   * Watcher detected file removed
   *
   * @param {string} libraryId
   * @param {string} path
   */
  onFileRemoved(libraryId, path) {
    if (this.checkShouldIgnorePath(path)) {
      return

@@ -148,11 +171,13 @@ class FolderWatcher extends EventEmitter {
    this.addFileUpdate(libraryId, path, 'deleted')
  }

  onFileUpdated(path) {
    Logger.debug('[Watcher] Updated File', path)
  }

  onRename(libraryId, pathFrom, pathTo) {
  /**
   * Watcher detected file renamed
   *
   * @param {string} libraryId
   * @param {string} pathFrom
   * @param {string} pathTo
   */
  onFileRename(libraryId, pathFrom, pathTo) {
    if (this.checkShouldIgnorePath(pathTo)) {
      return
    }

@@ -161,13 +186,41 @@ class FolderWatcher extends EventEmitter {
  }

  /**
   * File update detected from watcher
   * Get mtimeMs from an added file every second until it is no longer changing
   * Times out after 180s
   *
   * @param {string} path
   * @param {number} [lastMTimeMs=0]
   * @param {number} [loop=0]
   */
  async waitForFileToAdd(path, lastMTimeMs = 0, loop = 0) {
    // Safety to catch infinite loop (180s)
    if (loop >= 180) {
      Logger.warn(`[Watcher] Waiting to add file at "${path}" timeout (loop ${loop}) - proceeding`)
      return this.filesBeingAdded.delete(path)
    }

    const mtimeMs = await getFileMTimeMs(path)
    if (mtimeMs === lastMTimeMs) {
      if (lastMTimeMs) Logger.debug(`[Watcher] File finished adding at "${path}"`)
      return this.filesBeingAdded.delete(path)
    }
    if (loop % 5 === 0) {
      Logger.debug(`[Watcher] Waiting to add file at "${path}". mtimeMs=${mtimeMs} lastMTimeMs=${lastMTimeMs} (loop ${loop})`)
    }
    // Wait 1 second
    await new Promise((resolve) => setTimeout(resolve, 1000))
    this.waitForFileToAdd(path, mtimeMs, ++loop)
  }
|
||||
|
||||
/**
|
||||
* Queue file update
|
||||
*
|
||||
* @param {string} libraryId
|
||||
* @param {string} path
|
||||
* @param {string} type
|
||||
*/
|
||||
addFileUpdate(libraryId, path, type) {
|
||||
path = filePathToPOSIX(path)
|
||||
if (this.pendingFilePaths.includes(path)) return
|
||||
|
||||
// Get file library
|
||||
|
|
@ -178,7 +231,7 @@ class FolderWatcher extends EventEmitter {
|
|||
}
|
||||
|
||||
// Get file folder
|
||||
const folder = libwatcher.folders.find(fold => path.startsWith(filePathToPOSIX(fold.fullPath)))
|
||||
const folder = libwatcher.folders.find(fold => isSameOrSubPath(fold.fullPath, path))
|
||||
if (!folder) {
|
||||
Logger.error(`[Watcher] New file folder not found in library "${libwatcher.name}" with path "${path}"`)
|
||||
return
|
||||
|
|
@ -202,6 +255,13 @@ class FolderWatcher extends EventEmitter {
|
|||
|
||||
Logger.debug(`[Watcher] Modified file in library "${libwatcher.name}" and folder "${folder.id}" with relPath "${relPath}"`)
|
||||
|
||||
if (!this.pendingTask) {
|
||||
const taskData = {
|
||||
libraryId,
|
||||
libraryName: libwatcher.name
|
||||
}
|
||||
this.pendingTask = TaskManager.createAndAddTask('watcher-scan', `Scanning file changes in "${libwatcher.name}"`, null, true, taskData)
|
||||
}
|
||||
this.pendingFileUpdates.push({
|
||||
path,
|
||||
relPath,
|
||||
|
|
@ -210,18 +270,32 @@ class FolderWatcher extends EventEmitter {
|
|||
type
|
||||
})
|
||||
|
||||
// Notify server of update after "pendingDelay"
|
||||
this.handlePendingFileUpdatesTimeout()
|
||||
}
|
||||
|
||||
/**
|
||||
* Wait X seconds before notifying scanner that files changed
|
||||
* reset timer if files are still copying
|
||||
*/
|
||||
handlePendingFileUpdatesTimeout() {
|
||||
clearTimeout(this.pendingTimeout)
|
||||
this.pendingTimeout = setTimeout(() => {
|
||||
// this.emit('files', this.pendingFileUpdates)
|
||||
LibraryScanner.scanFilesChanged(this.pendingFileUpdates)
|
||||
// Check that files are not still being added
|
||||
if (this.pendingFileUpdates.some(pfu => this.filesBeingAdded.has(pfu.path))) {
|
||||
Logger.debug(`[Watcher] Still waiting for pending files "${[...this.filesBeingAdded].join(', ')}"`)
|
||||
return this.handlePendingFileUpdatesTimeout()
|
||||
}
|
||||
|
||||
LibraryScanner.scanFilesChanged(this.pendingFileUpdates, this.pendingTask)
|
||||
this.pendingTask = null
|
||||
this.pendingFileUpdates = []
|
||||
this.filesBeingAdded.clear()
|
||||
}, this.pendingDelay)
|
||||
}
|
||||
|
||||
checkShouldIgnorePath(path) {
|
||||
return !!this.ignoreDirs.find(dirpath => {
|
||||
return filePathToPOSIX(path).startsWith(dirpath)
|
||||
return isSameOrSubPath(dirpath, path)
|
||||
})
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
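The `waitForFileToAdd` pattern above, polling a file's mtime until it stops changing, is a general way to detect that a copy has finished before scanning. A minimal standalone sketch of the same idea, assuming only `fs.promises.stat` (the hunk's `getFileMTimeMs` helper is internal to this repo):

const fs = require('fs')

// Returns the file's mtime in ms, or 0 if the file is unreadable,
// mirroring a getFileMTimeMs-style helper
async function getMTimeMs(path) {
  try {
    return (await fs.promises.stat(path)).mtimeMs
  } catch {
    return 0
  }
}

// Resolves true once the mtime has been stable for one poll interval,
// or false after maxLoops seconds (the hunk proceeds anyway on timeout)
async function waitForStableFile(path, maxLoops = 180) {
  let lastMTimeMs = 0
  for (let loop = 0; loop < maxLoops; loop++) {
    const mtimeMs = await getMTimeMs(path)
    if (mtimeMs === lastMTimeMs) return true // unchanged for a full second
    lastMTimeMs = mtimeMs
    await new Promise((resolve) => setTimeout(resolve, 1000))
  }
  return false
}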
@@ -67,30 +67,10 @@ class AuthorController {
     const payload = req.body
     let hasUpdated = false

-    // Updating/removing cover image
-    if (payload.imagePath !== undefined && payload.imagePath !== req.author.imagePath) {
-      if (!payload.imagePath && req.author.imagePath) { // If removing image then remove file
-        await CacheManager.purgeImageCache(req.author.id) // Purge cache
-        await CoverManager.removeFile(req.author.imagePath)
-      } else if (payload.imagePath.startsWith('http')) { // Check if image path is a url
-        const imageData = await AuthorFinder.saveAuthorImage(req.author.id, payload.imagePath)
-        if (imageData) {
-          if (req.author.imagePath) {
-            await CacheManager.purgeImageCache(req.author.id) // Purge cache
-          }
-          payload.imagePath = imageData.path
-          hasUpdated = true
-        }
-      } else if (payload.imagePath && payload.imagePath !== req.author.imagePath) { // Changing image path locally
-        if (!await fs.pathExists(payload.imagePath)) { // Make sure image path exists
-          Logger.error(`[AuthorController] Image path does not exist: "${payload.imagePath}"`)
-          return res.status(400).send('Author image path does not exist')
-        }
-
-        if (req.author.imagePath) {
-          await CacheManager.purgeImageCache(req.author.id) // Purge cache
-        }
-      }
-    }
+    // author imagePath must be set through other endpoints as of v2.4.5
+    if (payload.imagePath !== undefined) {
+      Logger.warn(`[AuthorController] Updating local author imagePath is not supported`)
+      delete payload.imagePath
+    }

     const authorNameUpdate = payload.name !== undefined && payload.name !== req.author.name

@@ -131,7 +111,7 @@ class AuthorController {
       Database.removeAuthorFromFilterData(req.author.libraryId, req.author.id)

       // Send updated num books for merged author
-      const numBooks = await Database.libraryItemModel.getForAuthor(existingAuthor).length
+      const numBooks = (await Database.libraryItemModel.getForAuthor(existingAuthor)).length
       SocketAuthority.emitter('author_updated', existingAuthor.toJSONExpanded(numBooks))

       res.json({

@@ -191,6 +171,75 @@ class AuthorController {
     res.sendStatus(200)
   }

+  /**
+   * POST: /api/authors/:id/image
+   * Upload author image from web URL
+   *
+   * @param {import('express').Request} req
+   * @param {import('express').Response} res
+   */
+  async uploadImage(req, res) {
+    if (!req.user.canUpload) {
+      Logger.warn('User attempted to upload an image without permission', req.user)
+      return res.sendStatus(403)
+    }
+    if (!req.body.url) {
+      Logger.error(`[AuthorController] Invalid request payload. 'url' not in request body`)
+      return res.status(400).send(`Invalid request payload. 'url' not in request body`)
+    }
+    if (!req.body.url.startsWith?.('http:') && !req.body.url.startsWith?.('https:')) {
+      Logger.error(`[AuthorController] Invalid request payload. Invalid url "${req.body.url}"`)
+      return res.status(400).send(`Invalid request payload. Invalid url "${req.body.url}"`)
+    }
+
+    Logger.debug(`[AuthorController] Requesting download author image from url "${req.body.url}"`)
+    const result = await AuthorFinder.saveAuthorImage(req.author.id, req.body.url)
+
+    if (result?.error) {
+      return res.status(400).send(result.error)
+    } else if (!result?.path) {
+      return res.status(500).send('Unknown error occurred')
+    }
+
+    if (req.author.imagePath) {
+      await CacheManager.purgeImageCache(req.author.id) // Purge cache
+    }
+
+    req.author.imagePath = result.path
+    await Database.authorModel.updateFromOld(req.author)
+
+    const numBooks = (await Database.libraryItemModel.getForAuthor(req.author)).length
+    SocketAuthority.emitter('author_updated', req.author.toJSONExpanded(numBooks))
+    res.json({
+      author: req.author.toJSON()
+    })
+  }
+
+  /**
+   * DELETE: /api/authors/:id/image
+   * Remove author image & delete image file
+   *
+   * @param {import('express').Request} req
+   * @param {import('express').Response} res
+   */
+  async deleteImage(req, res) {
+    if (!req.author.imagePath) {
+      Logger.error(`[AuthorController] Author "${req.author.name}" has no imagePath set`)
+      return res.status(400).send('Author has no image path set')
+    }
+    Logger.info(`[AuthorController] Removing image for author "${req.author.name}" at "${req.author.imagePath}"`)
+    await CacheManager.purgeImageCache(req.author.id) // Purge cache
+    await CoverManager.removeFile(req.author.imagePath)
+    req.author.imagePath = null
+    await Database.authorModel.updateFromOld(req.author)
+
+    const numBooks = (await Database.libraryItemModel.getForAuthor(req.author)).length
+    SocketAuthority.emitter('author_updated', req.author.toJSONExpanded(numBooks))
+    res.json({
+      author: req.author.toJSON()
+    })
+  }

   async match(req, res) {
     let authorData = null
     const region = req.body.region || 'us'

@@ -215,7 +264,7 @@ class AuthorController {
       await CacheManager.purgeImageCache(req.author.id)

       const imageData = await AuthorFinder.saveAuthorImage(req.author.id, authorData.image)
-      if (imageData) {
+      if (imageData?.path) {
         req.author.imagePath = imageData.path
         hasUpdates = true
       }

@@ -231,7 +280,7 @@ class AuthorController {
       await Database.updateAuthor(req.author)

-      const numBooks = await Database.libraryItemModel.getForAuthor(req.author).length
+      const numBooks = (await Database.libraryItemModel.getForAuthor(req.author)).length
       SocketAuthority.emitter('author_updated', req.author.toJSONExpanded(numBooks))
     }
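The `numBooks` change in two of these hunks fixes an operator-precedence bug worth calling out: property access binds tighter than `await`, so the old code read `.length` off the pending Promise (always `undefined`) and then awaited that. A minimal illustration with a hypothetical `getBooks` helper:

// Hypothetical async helper returning an array
const getBooks = async () => ['a', 'b', 'c']

async function demo() {
  const wrong = await getBooks().length    // .length of a Promise -> undefined
  const right = (await getBooks()).length  // 3
  console.log(wrong, right)                // undefined 3
}

demo()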
@@ -51,32 +51,45 @@ class EmailController {
     })
   }

+  /**
+   * Send ebook to device
+   * User must have access to device and library item
+   *
+   * @param {import('express').Request} req
+   * @param {import('express').Response} res
+   */
   async sendEBookToDevice(req, res) {
-    Logger.debug(`[EmailController] Send ebook to device request for libraryItemId=${req.body.libraryItemId}, deviceName=${req.body.deviceName}`)
+    Logger.debug(`[EmailController] Send ebook to device requested by user "${req.user.username}" for libraryItemId=${req.body.libraryItemId}, deviceName=${req.body.deviceName}`)

+    const device = Database.emailSettings.getEReaderDevice(req.body.deviceName)
+    if (!device) {
+      return res.status(404).send('Ereader device not found')
+    }
+
+    // Check user has access to device
+    if (!Database.emailSettings.checkUserCanAccessDevice(device, req.user)) {
+      return res.sendStatus(403)
+    }
+
     const libraryItem = await Database.libraryItemModel.getOldById(req.body.libraryItemId)
     if (!libraryItem) {
       return res.status(404).send('Library item not found')
     }

+    // Check user has access to library item
+    if (!req.user.checkCanAccessLibraryItem(libraryItem)) {
+      return res.sendStatus(403)
+    }
+
     const ebookFile = libraryItem.media.ebookFile
     if (!ebookFile) {
-      return res.status(404).send('EBook file not found')
-    }
-
-    const device = Database.emailSettings.getEReaderDevice(req.body.deviceName)
-    if (!device) {
-      return res.status(404).send('E-reader device not found')
+      return res.status(404).send('Ebook file not found')
     }

     this.emailManager.sendEBookToDevice(ebookFile, device, res)
   }

-  middleware(req, res, next) {
+  adminMiddleware(req, res, next) {
     if (!req.user.isAdminOrUp) {
       return res.sendStatus(404)
     }
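The reordered checks make the authorization sequence explicit: resolve the device, check device access, resolve the item, check item access, then act, with each guard returning early with the narrowest status. A condensed sketch of the same shape (all names here are hypothetical, not this repo's API):

// Hypothetical guard-clause shape for a "send X to Y" endpoint
async function sendToDevice(req, res, deps) {
  const device = deps.findDevice(req.body.deviceName)
  if (!device) return res.status(404).send('Device not found')
  if (!deps.canAccessDevice(req.user, device)) return res.sendStatus(403)

  const item = await deps.findItem(req.body.libraryItemId)
  if (!item) return res.status(404).send('Item not found')
  if (!deps.canAccessItem(req.user, item)) return res.sendStatus(403)

  return deps.send(item, device, res)
}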
@@ -1,31 +1,69 @@
 const Path = require('path')
 const Logger = require('../Logger')
 const Database = require('../Database')
+const fs = require('../libs/fsExtra')
+const { toNumber } = require('../utils/index')
 const fileUtils = require('../utils/fileUtils')

 class FileSystemController {
   constructor() { }

+  /**
+   *
+   * @param {import('express').Request} req
+   * @param {import('express').Response} res
+   */
   async getPaths(req, res) {
     if (!req.user.isAdminOrUp) {
       Logger.error(`[FileSystemController] Non-admin user attempting to get filesystem paths`, req.user)
       return res.sendStatus(403)
     }

-    const excludedDirs = ['node_modules', 'client', 'server', '.git', 'static', 'build', 'dist', 'metadata', 'config', 'sys', 'proc'].map(dirname => {
-      return Path.sep + dirname
-    })
+    const relpath = req.query.path
+    const level = toNumber(req.query.level, 0)

-    // Do not include existing mapped library paths in response
-    const libraryFoldersPaths = await Database.libraryFolderModel.getAllLibraryFolderPaths()
-    libraryFoldersPaths.forEach((path) => {
-      let dir = path || ''
-      if (dir.includes(global.appRoot)) dir = dir.replace(global.appRoot, '')
-      excludedDirs.push(dir)
-    })
+    // Validate path. Must be absolute
+    if (relpath && (!Path.isAbsolute(relpath) || !await fs.pathExists(relpath))) {
+      Logger.error(`[FileSystemController] Invalid path in query string "${relpath}"`)
+      return res.status(400).send('Invalid "path" query string')
+    }
+    Logger.debug(`[FileSystemController] Getting file paths at ${relpath || 'root'} (${level})`)
+
+    let directories = []
+
+    // Windows returns drives first
+    if (global.isWin) {
+      if (relpath) {
+        directories = await fileUtils.getDirectoriesInPath(relpath, level)
+      } else {
+        const drives = await fileUtils.getWindowsDrives().catch((error) => {
+          Logger.error(`[FileSystemController] Failed to get windows drives`, error)
+          return []
+        })
+        if (drives.length) {
+          directories = drives.map(d => {
+            return {
+              path: d,
+              dirname: d,
+              level: 0
+            }
+          })
+        }
+      }
+    } else {
+      directories = await fileUtils.getDirectoriesInPath(relpath || '/', level)
+    }
+
+    // Exclude some dirs from this project to be cleaner in Docker
+    const excludedDirs = ['node_modules', 'client', 'server', '.git', 'static', 'build', 'dist', 'metadata', 'config', 'sys', 'proc', '.devcontainer', '.nyc_output', '.github', '.vscode'].map(dirname => {
+      return fileUtils.filePathToPOSIX(Path.join(global.appRoot, dirname))
+    })
+    directories = directories.filter(dir => {
+      return !excludedDirs.includes(dir.path)
+    })

     res.json({
-      directories: await this.getDirectories(global.appRoot, '/', excludedDirs)
+      posix: !global.isWin,
+      directories
     })
   }
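The exclusion list now normalizes each path to POSIX separators before comparing, since string equality across platforms fails when one side uses backslashes. A self-contained illustration, with a stand-in for this repo's `fileUtils.filePathToPOSIX` and an assumed app root:

const path = require('path')

// Normalize to forward slashes so string comparisons work across platforms
// (stand-in for fileUtils.filePathToPOSIX)
const toPOSIX = (p) => p.replace(/\\/g, '/')

const appRoot = '/opt/audiobookshelf' // assumed root for the example
const excluded = ['node_modules', 'metadata', 'config'].map(
  (dirname) => toPOSIX(path.join(appRoot, dirname))
)

const dirs = [{ path: '/opt/audiobookshelf/metadata' }, { path: '/audiobooks' }]
console.log(dirs.filter((d) => !excluded.includes(d.path)))
// -> [ { path: '/audiobooks' } ]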
@@ -9,7 +9,8 @@ const libraryItemsBookFilters = require('../utils/queries/libraryItemsBookFilters')
 const libraryItemFilters = require('../utils/queries/libraryItemFilters')
 const seriesFilters = require('../utils/queries/seriesFilters')
+const fileUtils = require('../utils/fileUtils')
-const { sort, createNewSortInstance } = require('../libs/fastSort')
+const { asciiOnlyToLowerCase } = require('../utils/index')
+const { createNewSortInstance } = require('../libs/fastSort')
 const naturalSort = createNewSortInstance({
   comparer: new Intl.Collator(undefined, { numeric: true, sensitivity: 'base' }).compare
 })

@@ -551,11 +552,11 @@ class LibraryController {
   * @param {import('express').Response} res
   */
  async search(req, res) {
-    if (!req.query.q) {
-      return res.status(400).send('No query string')
+    if (!req.query.q || typeof req.query.q !== 'string') {
+      return res.status(400).send('Invalid request. Query param "q" must be a string')
     }
     const limit = req.query.limit && !isNaN(req.query.limit) ? Number(req.query.limit) : 12
-    const query = req.query.q.trim().toLowerCase()
+    const query = asciiOnlyToLowerCase(req.query.q.trim())

     const matches = await libraryItemFilters.search(req.user, req.library, query, limit)
     res.json(matches)

@@ -620,7 +621,7 @@ class LibraryController {
         model: Database.bookModel,
         attributes: ['id', 'tags', 'explicit'],
         where: bookWhere,
-        required: false,
+        required: !req.user.isAdminOrUp, // Only show authors with 0 books for admin users or up
         through: {
           attributes: []
         }

@@ -774,6 +775,13 @@ class LibraryController {
     })
   }

+  /**
+   * GET: /api/libraries/:id/matchall
+   * Quick match all library items. Book libraries only.
+   *
+   * @param {import('express').Request} req
+   * @param {import('express').Response} res
+   */
   async matchAll(req, res) {
     if (!req.user.isAdminOrUp) {
       Logger.error(`[LibraryController] Non-root user attempted to match library items`, req.user)

@@ -783,7 +791,14 @@ class LibraryController {
     res.sendStatus(200)
   }

-  // POST: api/libraries/:id/scan
+  /**
+   * POST: /api/libraries/:id/scan
+   * Optional query:
+   * ?force=1
+   *
+   * @param {import('express').Request} req
+   * @param {import('express').Response} res
+   */
   async scan(req, res) {
     if (!req.user.isAdminOrUp) {
       Logger.error(`[LibraryController] Non-root user attempted to scan library`, req.user)

@@ -791,7 +806,8 @@ class LibraryController {
     }
     res.sendStatus(200)

-    await LibraryScanner.scan(req.library)
+    const forceRescan = req.query.force === '1'
+    await LibraryScanner.scan(req.library, forceRescan)

     await Database.resetLibraryIssuesFilterData(req.library.id)
     Logger.info('[LibraryController] Scan complete')

@@ -845,6 +861,56 @@ class LibraryController {
     res.send(opmlText)
   }

+  /**
+   * Remove all metadata.json or metadata.abs files in library item folders
+   *
+   * @param {import('express').Request} req
+   * @param {import('express').Response} res
+   */
+  async removeAllMetadataFiles(req, res) {
+    if (!req.user.isAdminOrUp) {
+      Logger.error(`[LibraryController] Non-admin user attempted to remove all metadata files`, req.user)
+      return res.sendStatus(403)
+    }
+
+    const fileExt = req.query.ext === 'abs' ? 'abs' : 'json'
+    const metadataFilename = `metadata.${fileExt}`
+    const libraryItemsWithMetadata = await Database.libraryItemModel.findAll({
+      attributes: ['id', 'libraryFiles'],
+      where: [
+        {
+          libraryId: req.library.id
+        },
+        Sequelize.where(Sequelize.literal(`(SELECT count(*) FROM json_each(libraryFiles) WHERE json_valid(libraryFiles) AND json_extract(json_each.value, "$.metadata.filename") = "${metadataFilename}")`), {
+          [Sequelize.Op.gte]: 1
+        })
+      ]
+    })
+    if (!libraryItemsWithMetadata.length) {
+      Logger.info(`[LibraryController] No ${metadataFilename} files found to remove`)
+      return res.json({
+        found: 0
+      })
+    }
+
+    Logger.info(`[LibraryController] Found ${libraryItemsWithMetadata.length} ${metadataFilename} files to remove`)
+
+    let numRemoved = 0
+    for (const libraryItem of libraryItemsWithMetadata) {
+      const metadataFilepath = libraryItem.libraryFiles.find(lf => lf.metadata.filename === metadataFilename)?.metadata.path
+      if (!metadataFilepath) continue
+      Logger.debug(`[LibraryController] Removing file "${metadataFilepath}"`)
+      if ((await fileUtils.removeFile(metadataFilepath))) {
+        numRemoved++
+      }
+    }
+
+    res.json({
+      found: libraryItemsWithMetadata.length,
+      removed: numRemoved
+    })
+  }

  /**
   * Middleware that is not using libraryItems from memory
   * @param {import('express').Request} req
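The `removeAllMetadataFiles` hunk filters rows by looking inside a JSON column with SQLite's `json_each`. The same shape, reduced to its essentials (table and column names here are stand-ins, not the app's schema):

const { Sequelize, Op } = require('sequelize')

// Count JSON array entries whose $.metadata.filename matches, and keep rows
// where that count is >= 1. json_valid() guards against malformed column data.
function whereHasLibraryFile(filename) {
  return Sequelize.where(
    Sequelize.literal(
      `(SELECT count(*) FROM json_each(libraryFiles)
         WHERE json_valid(libraryFiles)
           AND json_extract(json_each.value, '$.metadata.filename') = '${filename}')`
    ),
    { [Op.gte]: 1 }
  )
}

Interpolating `filename` into the SQL literal is only safe here because the caller restricts it to `metadata.json` or `metadata.abs`; arbitrary user input would need a bound parameter instead.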
@@ -85,12 +85,31 @@ class LibraryItemController {
     res.sendStatus(200)
   }

+  /**
+   * GET: /api/items/:id/download
+   * Download library item. Zip file if multiple files.
+   *
+   * @param {import('express').Request} req
+   * @param {import('express').Response} res
+   */
   download(req, res) {
     if (!req.user.canDownload) {
       Logger.warn('User attempted to download without permission', req.user)
       return res.sendStatus(403)
     }

+    // If library item is a single file in root dir then no need to zip
+    if (req.libraryItem.isFile) {
+      // Express does not set the correct mimetype for m4b files so use our defined mimetypes if available
+      const audioMimeType = getAudioMimeTypeFromExtname(Path.extname(req.libraryItem.path))
+      if (audioMimeType) {
+        res.setHeader('Content-Type', audioMimeType)
+      }
+
+      res.download(req.libraryItem.path, req.libraryItem.relPath)
+      return
+    }
+
     const libraryItemPath = req.libraryItem.path
     const itemTitle = req.libraryItem.media.metadata.title
     Logger.info(`[LibraryItemController] User "${req.user.username}" requested download for item "${itemTitle}" at "${libraryItemPath}"`)

@@ -163,22 +182,22 @@ class LibraryItemController {
       return res.sendStatus(403)
     }

-    var libraryItem = req.libraryItem
+    let libraryItem = req.libraryItem

-    var result = null
-    if (req.body && req.body.url) {
+    let result = null
+    if (req.body?.url) {
       Logger.debug(`[LibraryItemController] Requesting download cover from url "${req.body.url}"`)
       result = await CoverManager.downloadCoverFromUrl(libraryItem, req.body.url)
-    } else if (req.files && req.files.cover) {
+    } else if (req.files?.cover) {
       Logger.debug(`[LibraryItemController] Handling uploaded cover`)
       result = await CoverManager.uploadCover(libraryItem, req.files.cover)
     } else {
       return res.status(400).send('Invalid request no file or url')
     }

-    if (result && result.error) {
+    if (result?.error) {
       return res.status(400).send(result.error)
-    } else if (!result || !result.cover) {
+    } else if (!result?.cover) {
       return res.status(500).send('Unknown error occurred')
     }
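The download hunk works around Express guessing the wrong Content-Type for `.m4b`: `res.download` infers the type from the extension only when no Content-Type header is already set, so the handler pins the header first. A minimal sketch of the same workaround (the mime map below is illustrative, not this repo's full table):

const Path = require('path')

// Illustrative subset of an extension -> audio mimetype map
const AUDIO_MIMETYPES = {
  '.m4b': 'audio/mp4',
  '.m4a': 'audio/mp4',
  '.mp3': 'audio/mpeg'
}

function sendAudioDownload(res, filePath, downloadName) {
  const mimeType = AUDIO_MIMETYPES[Path.extname(filePath).toLowerCase()]
  if (mimeType) {
    // Set before res.download so Express does not overwrite it with its guess
    res.setHeader('Content-Type', mimeType)
  }
  res.download(filePath, downloadName)
}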
@@ -3,6 +3,7 @@ const SocketAuthority = require('../SocketAuthority')
 const Database = require('../Database')
 const { sort } = require('../libs/fastSort')
 const { toNumber } = require('../utils/index')
+const userStats = require('../utils/queries/userStats')

 class MeController {
   constructor() { }

@@ -333,5 +334,21 @@ class MeController {
     }
     res.json(req.user.toJSONForBrowser())
   }

+  /**
+   * GET: /api/me/stats/year/:year
+   *
+   * @param {import('express').Request} req
+   * @param {import('express').Response} res
+   */
+  async getStatsForYear(req, res) {
+    const year = Number(req.params.year)
+    if (isNaN(year) || year < 2000 || year > 9999) {
+      Logger.error(`[MeController] Invalid year "${year}"`)
+      return res.status(400).send('Invalid year')
+    }
+    const data = await userStats.getStatsForYear(req.user, year)
+    res.json(data)
+  }
 }
 module.exports = new MeController()
@@ -8,6 +8,10 @@ const Database = require('../Database')
 const libraryItemFilters = require('../utils/queries/libraryItemFilters')
 const patternValidation = require('../libs/nodeCron/pattern-validation')
 const { isObject, getTitleIgnorePrefix } = require('../utils/index')
+const { sanitizeFilename } = require('../utils/fileUtils')
+
+const TaskManager = require('../managers/TaskManager')
+const adminStats = require('../utils/queries/adminStats')

 //
 // This is a controller for routes that don't have a home yet :(

@@ -30,12 +34,9 @@ class MiscController {
       Logger.error('Invalid request, no files')
       return res.sendStatus(400)
     }

     const files = Object.values(req.files)
-    const title = req.body.title
-    const author = req.body.author
-    const series = req.body.series
-    const libraryId = req.body.library
-    const folderId = req.body.folder
+    const { title, author, series, folder: folderId, library: libraryId } = req.body

     const library = await Database.libraryModel.getOldById(libraryId)
     if (!library) {

@@ -50,43 +51,29 @@ class MiscController {
       return res.status(500).send(`Invalid post data`)
     }

-    // For setting permissions recursively
-    let outputDirectory = ''
-    let firstDirPath = ''
-
-    if (library.isPodcast) { // Podcasts only in 1 folder
-      outputDirectory = Path.join(folder.fullPath, title)
-      firstDirPath = outputDirectory
-    } else {
-      firstDirPath = Path.join(folder.fullPath, author)
-      if (series && author) {
-        outputDirectory = Path.join(folder.fullPath, author, series, title)
-      } else if (author) {
-        outputDirectory = Path.join(folder.fullPath, author, title)
-      } else {
-        outputDirectory = Path.join(folder.fullPath, title)
-      }
-    }
-
-    if (await fs.pathExists(outputDirectory)) {
-      Logger.error(`[Server] Upload directory "${outputDirectory}" already exists`)
-      return res.status(500).send(`Directory "${outputDirectory}" already exists`)
-    }
+    // Podcasts should only be one folder deep
+    const outputDirectoryParts = library.isPodcast ? [title] : [author, series, title]
+    // `.filter(Boolean)` to strip out all the potentially missing details (eg: `author`)
+    // before sanitizing all the directory parts to remove illegal chars and finally prepending
+    // the base folder path
+    const cleanedOutputDirectoryParts = outputDirectoryParts.filter(Boolean).map(part => sanitizeFilename(part))
+    const outputDirectory = Path.join(...[folder.fullPath, ...cleanedOutputDirectoryParts])

     await fs.ensureDir(outputDirectory)

     Logger.info(`Uploading ${files.length} files to`, outputDirectory)

-    for (let i = 0; i < files.length; i++) {
-      var file = files[i]
+    for (const file of files) {
+      const path = Path.join(outputDirectory, sanitizeFilename(file.name))

-      var path = Path.join(outputDirectory, file.name)
-      await file.mv(path).then(() => {
-        return true
-      }).catch((error) => {
-        Logger.error('Failed to move file', path, error)
-        return false
-      })
+      await file.mv(path)
+        .then(() => {
+          return true
+        })
+        .catch((error) => {
+          Logger.error('Failed to move file', path, error)
+          return false
+        })
     }

     res.sendStatus(200)
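The upload rewrite builds the destination from optional parts in one pass: drop the missing ones, sanitize each survivor, then join. A self-contained sketch with a toy sanitizer (the real `sanitizeFilename` in `utils/fileUtils` does considerably more):

const Path = require('path')

// Toy stand-in: strip characters that are illegal in common filesystems
const sanitize = (part) => part.replace(/[/\\?%*:|"<>]/g, '')

function buildOutputDirectory(baseDir, { isPodcast, title, author, series }) {
  const parts = isPodcast ? [title] : [author, series, title]
  const cleaned = parts.filter(Boolean).map(sanitize) // drop undefined/empty, then sanitize
  return Path.join(baseDir, ...cleaned)
}

console.log(buildOutputDirectory('/audiobooks', { title: 'Dune: Part 1', author: 'Frank Herbert' }))
// -> /audiobooks/Frank Herbert/Dune Part 1   (no series, colon stripped)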
@@ -102,7 +89,7 @@ class MiscController {
     const includeArray = (req.query.include || '').split(',')

     const data = {
-      tasks: this.taskManager.tasks.map(t => t.toJSON())
+      tasks: TaskManager.tasks.map(t => t.toJSON())
     }

     if (includeArray.includes('queue')) {

@@ -117,8 +104,9 @@ class MiscController {
   /**
    * PATCH: /api/settings
    * Update server settings
-   * @param {*} req
-   * @param {*} res
+   *
+   * @param {import('express').Request} req
+   * @param {import('express').Response} res
    */
   async updateServerSettings(req, res) {
     if (!req.user.isAdminOrUp) {

@@ -126,7 +114,7 @@ class MiscController {
       return res.sendStatus(403)
     }
     const settingsUpdate = req.body
-    if (!settingsUpdate || !isObject(settingsUpdate)) {
+    if (!isObject(settingsUpdate)) {
       return res.status(400).send('Invalid settings update object')
     }

@@ -246,8 +234,8 @@ class MiscController {
    * POST: /api/authorize
    * Used to authorize an API token
    *
-   * @param {*} req
-   * @param {*} res
+   * @param {import('express').Request} req
+   * @param {import('express').Response} res
    */
   async authorize(req, res) {
     if (!req.user) {

@@ -525,6 +513,54 @@ class MiscController {
     })
   }

+  /**
+   * POST: /api/watcher/update
+   * Update a watch path
+   * Req.body { libraryId, path, type, [oldPath] }
+   * type = add, unlink, rename
+   * oldPath = required only for rename
+   * @this import('../routers/ApiRouter')
+   *
+   * @param {import('express').Request} req
+   * @param {import('express').Response} res
+   */
+  updateWatchedPath(req, res) {
+    if (!req.user.isAdminOrUp) {
+      Logger.error(`[MiscController] Non-admin user attempted to updateWatchedPath`)
+      return res.sendStatus(404)
+    }
+
+    const libraryId = req.body.libraryId
+    const path = req.body.path
+    const type = req.body.type
+    if (!libraryId || !path || !type) {
+      Logger.error(`[MiscController] Invalid request body for updateWatchedPath. libraryId: "${libraryId}", path: "${path}", type: "${type}"`)
+      return res.sendStatus(400)
+    }
+
+    switch (type) {
+      case 'add':
+        this.watcher.onFileAdded(libraryId, path)
+        break
+      case 'unlink':
+        this.watcher.onFileRemoved(libraryId, path)
+        break
+      case 'rename':
+        const oldPath = req.body.oldPath
+        if (!oldPath) {
+          Logger.error(`[MiscController] Invalid request body for updateWatchedPath. oldPath is required for rename.`)
+          return res.sendStatus(400)
+        }
+        this.watcher.onFileRename(libraryId, oldPath, path)
+        break
+      default:
+        Logger.error(`[MiscController] Invalid type for updateWatchedPath. type: "${type}"`)
+        return res.sendStatus(400)
+    }
+
+    res.sendStatus(200)
+  }

   validateCronExpression(req, res) {
     const expression = req.body.expression
     if (!expression) {
@@ -539,5 +575,147 @@ class MiscController {
       res.status(400).send(error.message)
     }
   }

+  /**
+   * GET: api/auth-settings (admin only)
+   *
+   * @param {import('express').Request} req
+   * @param {import('express').Response} res
+   */
+  getAuthSettings(req, res) {
+    if (!req.user.isAdminOrUp) {
+      Logger.error(`[MiscController] Non-admin user "${req.user.username}" attempted to get auth settings`)
+      return res.sendStatus(403)
+    }
+    return res.json(Database.serverSettings.authenticationSettings)
+  }
+
+  /**
+   * PATCH: api/auth-settings
+   * @this import('../routers/ApiRouter')
+   *
+   * @param {import('express').Request} req
+   * @param {import('express').Response} res
+   */
+  async updateAuthSettings(req, res) {
+    if (!req.user.isAdminOrUp) {
+      Logger.error(`[MiscController] Non-admin user "${req.user.username}" attempted to update auth settings`)
+      return res.sendStatus(403)
+    }
+
+    const settingsUpdate = req.body
+    if (!isObject(settingsUpdate)) {
+      return res.status(400).send('Invalid auth settings update object')
+    }
+
+    let hasUpdates = false
+
+    const currentAuthenticationSettings = Database.serverSettings.authenticationSettings
+    const originalAuthMethods = [...currentAuthenticationSettings.authActiveAuthMethods]
+
+    // TODO: Better validation of auth settings once auth settings are separated from server settings
+    for (const key in currentAuthenticationSettings) {
+      if (settingsUpdate[key] === undefined) continue
+
+      if (key === 'authActiveAuthMethods') {
+        let updatedAuthMethods = settingsUpdate[key]?.filter?.((authMeth) => Database.serverSettings.supportedAuthMethods.includes(authMeth))
+        if (Array.isArray(updatedAuthMethods) && updatedAuthMethods.length) {
+          updatedAuthMethods.sort()
+          currentAuthenticationSettings[key].sort()
+          if (updatedAuthMethods.join() !== currentAuthenticationSettings[key].join()) {
+            Logger.debug(`[MiscController] Updating auth settings key "authActiveAuthMethods" from "${currentAuthenticationSettings[key].join()}" to "${updatedAuthMethods.join()}"`)
+            Database.serverSettings[key] = updatedAuthMethods
+            hasUpdates = true
+          }
+        } else {
+          Logger.warn(`[MiscController] Invalid value for authActiveAuthMethods`)
+        }
+      } else if (key === 'authOpenIDMobileRedirectURIs') {
+        function isValidRedirectURI(uri) {
+          if (typeof uri !== 'string') return false
+          const pattern = new RegExp('^\\w+://[\\w.-]+$', 'i')
+          return pattern.test(uri)
+        }
+
+        const uris = settingsUpdate[key]
+        if (!Array.isArray(uris) ||
+          (uris.includes('*') && uris.length > 1) ||
+          uris.some(uri => uri !== '*' && !isValidRedirectURI(uri))) {
+          Logger.warn(`[MiscController] Invalid value for authOpenIDMobileRedirectURIs`)
+          continue
+        }
+
+        // Update the URIs
+        if (Database.serverSettings[key].some(uri => !uris.includes(uri)) || uris.some(uri => !Database.serverSettings[key].includes(uri))) {
+          Logger.debug(`[MiscController] Updating auth settings key "${key}" from "${Database.serverSettings[key]}" to "${uris}"`)
+          Database.serverSettings[key] = uris
+          hasUpdates = true
+        }
+      } else {
+        const updatedValueType = typeof settingsUpdate[key]
+        if (['authOpenIDAutoLaunch', 'authOpenIDAutoRegister'].includes(key)) {
+          if (updatedValueType !== 'boolean') {
+            Logger.warn(`[MiscController] Invalid value for ${key}. Expected boolean`)
+            continue
+          }
+        } else if (settingsUpdate[key] !== null && updatedValueType !== 'string') {
+          Logger.warn(`[MiscController] Invalid value for ${key}. Expected string or null`)
+          continue
+        }
+        let updatedValue = settingsUpdate[key]
+        if (updatedValue === '') updatedValue = null
+        let currentValue = currentAuthenticationSettings[key]
+        if (currentValue === '') currentValue = null
+
+        if (updatedValue !== currentValue) {
+          Logger.debug(`[MiscController] Updating auth settings key "${key}" from "${currentValue}" to "${updatedValue}"`)
+          Database.serverSettings[key] = updatedValue
+          hasUpdates = true
+        }
+      }
+    }
+
+    if (hasUpdates) {
+      await Database.updateServerSettings()
+
+      // Use/unuse auth methods
+      Database.serverSettings.supportedAuthMethods.forEach((authMethod) => {
+        if (originalAuthMethods.includes(authMethod) && !Database.serverSettings.authActiveAuthMethods.includes(authMethod)) {
+          // Auth method has been removed
+          Logger.info(`[MiscController] Disabling active auth method "${authMethod}"`)
+          this.auth.unuseAuthStrategy(authMethod)
+        } else if (!originalAuthMethods.includes(authMethod) && Database.serverSettings.authActiveAuthMethods.includes(authMethod)) {
+          // Auth method has been added
+          Logger.info(`[MiscController] Enabling active auth method "${authMethod}"`)
+          this.auth.useAuthStrategy(authMethod)
+        }
+      })
+    }
+
+    res.json({
+      updated: hasUpdates,
+      serverSettings: Database.serverSettings.toJSONForBrowser()
+    })
+  }
+
+  /**
+   * GET: /api/stats/year/:year
+   *
+   * @param {import('express').Request} req
+   * @param {import('express').Response} res
+   */
+  async getAdminStatsForYear(req, res) {
+    if (!req.user.isAdminOrUp) {
+      Logger.error(`[MiscController] Non-admin user "${req.user.username}" attempted to get admin stats for year`)
+      return res.sendStatus(403)
+    }
+    const year = Number(req.params.year)
+    if (isNaN(year) || year < 2000 || year > 9999) {
+      Logger.error(`[MiscController] Invalid year "${year}"`)
+      return res.status(400).send('Invalid year')
+    }
+    const stats = await adminStats.getStatsForYear(year)
+    res.json(stats)
+  }
 }
 module.exports = new MiscController()
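The `isValidRedirectURI` check in the auth-settings hunk accepts custom-scheme URIs like `audiobookshelf://oauth`, while the `*` wildcard is special-cased separately (and only when it is the sole entry). A quick illustration of what that regex admits:

const pattern = /^\w+:\/\/[\w.-]+$/i

const samples = ['audiobookshelf://oauth', 'my-app://auth', 'https://example.com/cb']
for (const uri of samples) {
  console.log(uri, pattern.test(uri))
}
// audiobookshelf://oauth true
// my-app://auth false         <- hyphen in the scheme is not matched by \w+
// https://example.com/cb false <- path segments are not allowed by the pattern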
@@ -6,6 +6,7 @@ const fs = require('../libs/fsExtra')

 const { getPodcastFeed, findMatchingEpisodes } = require('../utils/podcastUtils')
 const { getFileTimestampsWithIno, filePathToPOSIX } = require('../utils/fileUtils')
+const { validateUrl } = require('../utils/index')

 const Scanner = require('../scanner/Scanner')
 const CoverManager = require('../managers/CoverManager')

@@ -16,7 +17,7 @@ class PodcastController {

   async create(req, res) {
     if (!req.user.isAdminOrUp) {
-      Logger.error(`[PodcastController] Non-admin user attempted to create podcast`, req.user)
+      Logger.error(`[PodcastController] Non-admin user "${req.user.username}" attempted to create podcast`)
       return res.sendStatus(403)
     }
     const payload = req.body

@@ -102,10 +103,24 @@ class PodcastController {
     }
   }

+  /**
+   * POST: /api/podcasts/feed
+   *
+   * @typedef getPodcastFeedReqBody
+   * @property {string} rssFeed
+   *
+   * @param {import('express').Request<{}, {}, getPodcastFeedReqBody, {}>} req
+   * @param {import('express').Response} res
+   */
   async getPodcastFeed(req, res) {
-    var url = req.body.rssFeed
-    if (!url) {
-      return res.status(400).send('Bad request')
+    if (!req.user.isAdminOrUp) {
+      Logger.error(`[PodcastController] Non-admin user "${req.user.username}" attempted to get podcast feed`)
+      return res.sendStatus(403)
+    }
+
+    const url = validateUrl(req.body.rssFeed)
+    if (!url) {
+      return res.status(400).send('Invalid request body. "rssFeed" must be a valid URL')
     }

     const podcast = await getPodcastFeed(url)

@@ -116,6 +131,11 @@ class PodcastController {
   }

   async getFeedsFromOPMLText(req, res) {
+    if (!req.user.isAdminOrUp) {
+      Logger.error(`[PodcastController] Non-admin user "${req.user.username}" attempted to get feeds from opml`)
+      return res.sendStatus(403)
+    }
+
     if (!req.body.opmlText) {
       return res.sendStatus(400)
     }

@@ -184,10 +204,9 @@ class PodcastController {
       Logger.error(`[PodcastController] Non-admin user attempted to download episodes`, req.user)
       return res.sendStatus(403)
     }
-    var libraryItem = req.libraryItem
-
-    var episodes = req.body
-    if (!episodes || !episodes.length) {
+    const libraryItem = req.libraryItem
+    const episodes = req.body
+    if (!episodes?.length) {
       return res.sendStatus(400)
     }

@@ -286,7 +305,7 @@ class PodcastController {
     const numItems = pmi.playlist.playlistMediaItems.length - 1

     if (!numItems) {
-      Logger.info(`[PodcastController] Playlist "${playlist.name}" has no more items - removing it`)
+      Logger.info(`[PodcastController] Playlist "${pmi.playlist.name}" has no more items - removing it`)
       const jsonExpanded = await pmi.playlist.getOldJsonExpanded()
       SocketAuthority.clientEmitter(pmi.playlist.userId, 'playlist_removed', jsonExpanded)
       await pmi.playlist.destroy()
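`validateUrl` (from `utils/index`) replaces the bare truthiness check on `rssFeed`. The repo's helper is not reproduced here; a minimal equivalent using the WHATWG URL parser might look like:

// Returns the normalized URL string if parseable as http(s), else null
function validateUrl(value) {
  if (!value || typeof value !== 'string') return null
  try {
    const url = new URL(value)
    if (url.protocol !== 'http:' && url.protocol !== 'https:') return null
    return url.toString()
  } catch {
    return null
  }
}

console.log(validateUrl('https://example.com/feed.xml')) // https://example.com/feed.xml
console.log(validateUrl('not-a-url'))                    // null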
@@ -3,15 +3,18 @@ const BookFinder = require('../finders/BookFinder')
 const PodcastFinder = require('../finders/PodcastFinder')
 const AuthorFinder = require('../finders/AuthorFinder')
 const MusicFinder = require('../finders/MusicFinder')
+const Database = require("../Database")

 class SearchController {
   constructor() { }

   async findBooks(req, res) {
+    const id = req.query.id
+    const libraryItem = await Database.libraryItemModel.getOldById(id)
     const provider = req.query.provider || 'google'
     const title = req.query.title || ''
     const author = req.query.author || ''
-    const results = await BookFinder.search(provider, title, author)
+    const results = await BookFinder.search(libraryItem, provider, title, author)
     res.json(results)
   }

@@ -32,8 +35,19 @@ class SearchController {
     })
   }

+  /**
+   * Find podcast RSS feeds given a term
+   *
+   * @param {import('express').Request} req
+   * @param {import('express').Response} res
+   */
   async findPodcasts(req, res) {
     const term = req.query.term
+    if (!term) {
+      Logger.error('[SearchController] Invalid request query param "term" is required')
+      return res.status(400).send('Invalid request query param "term" is required')
+    }
+
     const results = await PodcastFinder.search(term)
     res.json(results)
   }
@@ -1,43 +1,105 @@
 const Logger = require('../Logger')
 const Database = require('../Database')
-const { toNumber } = require('../utils/index')
+const { toNumber, isUUID } = require('../utils/index')

 class SessionController {
   constructor() { }

   async findOne(req, res) {
-    return res.json(req.session)
+    return res.json(req.playbackSession)
   }

+  /**
+   * GET: /api/sessions
+   * @this import('../routers/ApiRouter')
+   *
+   * @param {import('express').Request} req
+   * @param {import('express').Response} res
+   */
   async getAllWithUserData(req, res) {
     if (!req.user.isAdminOrUp) {
       Logger.error(`[SessionController] getAllWithUserData: Non-admin user requested all session data ${req.user.id}/"${req.user.username}"`)
       return res.sendStatus(404)
     }

-    let listeningSessions = []
-    if (req.query.user) {
-      listeningSessions = await this.getUserListeningSessionsHelper(req.query.user)
-    } else {
-      listeningSessions = await this.getAllSessionsWithUserData()
+    // Validate "user" query
+    let userId = req.query.user
+    if (userId && !isUUID(userId)) {
+      Logger.warn(`[SessionController] Invalid "user" query string "${userId}"`)
+      userId = null
+    }
+    // Validate "sort" query
+    const validSortOrders = ['displayTitle', 'duration', 'playMethod', 'startTime', 'currentTime', 'timeListening', 'updatedAt', 'createdAt']
+    let orderKey = req.query.sort || 'updatedAt'
+    if (!validSortOrders.includes(orderKey)) {
+      Logger.warn(`[SessionController] Invalid "sort" query string "${orderKey}" (Must be one of "${validSortOrders.join('|')}")`)
+      orderKey = 'updatedAt'
+    }
+    let orderDesc = req.query.desc === '1' ? 'DESC' : 'ASC'
+    // Validate "itemsPerPage" and "page" query
+    let itemsPerPage = toNumber(req.query.itemsPerPage, 10) || 10
+    if (itemsPerPage < 1) {
+      Logger.warn(`[SessionController] Invalid "itemsPerPage" query string "${itemsPerPage}"`)
+      itemsPerPage = 10
+    }
+    let page = toNumber(req.query.page, 0)
+    if (page < 0) {
+      Logger.warn(`[SessionController] Invalid "page" query string "${page}"`)
+      page = 0
     }

-    const itemsPerPage = toNumber(req.query.itemsPerPage, 10) || 10
-    const page = toNumber(req.query.page, 0)
+    let where = null
+    const include = [
+      {
+        model: Database.models.device
+      }
+    ]

-    const start = page * itemsPerPage
-    const sessions = listeningSessions.slice(start, start + itemsPerPage)
+    if (userId) {
+      where = {
+        userId
+      }
+    } else {
+      include.push({
+        model: Database.userModel,
+        attributes: ['id', 'username']
+      })
+    }
+
+    const { rows, count } = await Database.playbackSessionModel.findAndCountAll({
+      where,
+      include,
+      order: [
+        [orderKey, orderDesc]
+      ],
+      limit: itemsPerPage,
+      offset: itemsPerPage * page
+    })
+
+    // Map playback sessions to old playback sessions
+    const sessions = rows.map(session => {
+      const oldPlaybackSession = Database.playbackSessionModel.getOldPlaybackSession(session)
+      if (session.user) {
+        return {
+          ...oldPlaybackSession,
+          user: {
+            id: session.user.id,
+            username: session.user.username
+          }
+        }
+      } else {
+        return oldPlaybackSession.toJSON()
+      }
+    })

     const payload = {
-      total: listeningSessions.length,
-      numPages: Math.ceil(listeningSessions.length / itemsPerPage),
+      total: count,
+      numPages: Math.ceil(count / itemsPerPage),
       page,
       itemsPerPage,
       sessions
     }

-    if (req.query.user) {
-      payload.userFilter = req.query.user
+    if (userId) {
+      payload.userId = userId
     }

     res.json(payload)

@@ -63,35 +125,78 @@ class SessionController {
   }

   async getOpenSession(req, res) {
-    const libraryItem = await Database.libraryItemModel.getOldById(req.session.libraryItemId)
-    const sessionForClient = req.session.toJSONForClient(libraryItem)
+    const libraryItem = await Database.libraryItemModel.getOldById(req.playbackSession.libraryItemId)
+    const sessionForClient = req.playbackSession.toJSONForClient(libraryItem)
     res.json(sessionForClient)
   }

   // POST: api/session/:id/sync
   sync(req, res) {
-    this.playbackSessionManager.syncSessionRequest(req.user, req.session, req.body, res)
+    this.playbackSessionManager.syncSessionRequest(req.user, req.playbackSession, req.body, res)
   }

   // POST: api/session/:id/close
   close(req, res) {
     let syncData = req.body
     if (syncData && !Object.keys(syncData).length) syncData = null
-    this.playbackSessionManager.closeSessionRequest(req.user, req.session, syncData, res)
+    this.playbackSessionManager.closeSessionRequest(req.user, req.playbackSession, syncData, res)
   }

   // DELETE: api/session/:id
   async delete(req, res) {
     // if session is open then remove it
-    const openSession = this.playbackSessionManager.getSession(req.session.id)
+    const openSession = this.playbackSessionManager.getSession(req.playbackSession.id)
     if (openSession) {
-      await this.playbackSessionManager.removeSession(req.session.id)
+      await this.playbackSessionManager.removeSession(req.playbackSession.id)
     }

-    await Database.removePlaybackSession(req.session.id)
+    await Database.removePlaybackSession(req.playbackSession.id)
     res.sendStatus(200)
   }

+  /**
+   * POST: /api/sessions/batch/delete
+   * @this import('../routers/ApiRouter')
+   *
+   * @typedef batchDeleteReqBody
+   * @property {string[]} sessions
+   *
+   * @param {import('express').Request<{}, {}, batchDeleteReqBody, {}>} req
+   * @param {import('express').Response} res
+   */
+  async batchDelete(req, res) {
+    if (!req.user.isAdminOrUp) {
+      Logger.error(`[SessionController] Non-admin user attempted to batch delete sessions "${req.user.username}"`)
+      return res.sendStatus(403)
+    }
+    // Validate session ids
+    if (!req.body.sessions?.length || !Array.isArray(req.body.sessions) || req.body.sessions.some(s => !isUUID(s))) {
+      Logger.error(`[SessionController] Invalid request body. "sessions" array is required`, req.body)
+      return res.status(400).send('Invalid request body. "sessions" array of session id strings is required.')
+    }
+
+    // Check if any of these sessions are open and close it
+    for (const sessionId of req.body.sessions) {
+      const openSession = this.playbackSessionManager.getSession(sessionId)
+      if (openSession) {
+        await this.playbackSessionManager.removeSession(sessionId)
+      }
+    }
+
+    try {
+      const sessionsRemoved = await Database.playbackSessionModel.destroy({
+        where: {
+          id: req.body.sessions
+        }
+      })
+      Logger.info(`[SessionController] ${sessionsRemoved} playback sessions removed by "${req.user.username}"`)
+      res.sendStatus(200)
+    } catch (error) {
+      Logger.error(`[SessionController] Failed to remove playback sessions`, error)
+      res.status(500).send('Failed to remove sessions')
+    }
+  }

   // POST: api/session/local
   syncLocal(req, res) {
     this.playbackSessionManager.syncLocalSessionRequest(req, res)

@@ -111,7 +216,7 @@ class SessionController {
       return res.sendStatus(404)
     }

-    req.session = playbackSession
+    req.playbackSession = playbackSession
     next()
   }

@@ -130,7 +235,7 @@ class SessionController {
       return res.sendStatus(403)
     }

-    req.session = playbackSession
+    req.playbackSession = playbackSession
     next()
   }
 }
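Moving from slice-in-memory to `findAndCountAll` pushes paging into SQL: `limit`/`offset` fetch one page while `count` still reflects the whole filtered set, which is exactly what `numPages` needs. A reduced sketch of the shape (the `Session` model here is hypothetical):

// Page through a hypothetical Session model ordered by a validated column
async function getSessionPage(Session, { page = 0, itemsPerPage = 10, orderKey = 'updatedAt', desc = true } = {}) {
  const { rows, count } = await Session.findAndCountAll({
    order: [[orderKey, desc ? 'DESC' : 'ASC']], // orderKey must be allow-listed by the caller
    limit: itemsPerPage,        // page size in SQL, not in JS
    offset: itemsPerPage * page // skip previous pages
  })
  return {
    sessions: rows,
    total: count, // count covers the whole filtered set, not just this page
    numPages: Math.ceil(count / itemsPerPage),
    page,
    itemsPerPage
  }
}

Note that the hunk allow-lists `orderKey` against `validSortOrders` first; passing a raw query value straight into `order` would otherwise throw on unknown columns.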
@@ -100,7 +100,7 @@ class UserController {
     account.id = uuidv4()
     account.pash = await this.auth.hashPass(account.password)
     delete account.password
-    account.token = await this.auth.generateAccessToken({ userId: account.id, username })
+    account.token = await this.auth.generateAccessToken(account)
     account.createdAt = Date.now()
     const newUser = new User(account)

@@ -115,6 +115,13 @@ class UserController {
     }
   }

+  /**
+   * PATCH: /api/users/:id
+   * Update user
+   *
+   * @param {import('express').Request} req
+   * @param {import('express').Response} res
+   */
   async update(req, res) {
     const user = req.reqUser

@@ -126,6 +133,7 @@ class UserController {
     var account = req.body
     var shouldUpdateToken = false

+    // When changing username create a new API token
     if (account.username !== undefined && account.username !== user.username) {
       const usernameExists = await Database.userModel.getUserByUsername(account.username)
       if (usernameExists) {

@@ -142,7 +150,7 @@ class UserController {

     if (user.update(account)) {
       if (shouldUpdateToken) {
-        user.token = await this.auth.generateAccessToken({ userId: user.id, username: user.username })
+        user.token = await this.auth.generateAccessToken(user)
         Logger.info(`[UserController] User ${user.username} was generated a new api token`)
       }
       await Database.updateUser(user)
@@ -1,16 +0,0 @@
-const itemDb = require('../db/item.db')
-
-const getLibraryItem = async (req, res) => {
-  let libraryItem = null
-  if (req.query.expanded == 1) {
-    libraryItem = await itemDb.getLibraryItemExpanded(req.params.id)
-  } else {
-    libraryItem = await itemDb.getLibraryItemMinified(req.params.id)
-  }
-
-  res.json(libraryItem)
-}
-
-module.exports = {
-  getLibraryItem
-}
@@ -1,80 +0,0 @@
-/**
- * TODO: Unused for testing
- */
-const { Sequelize } = require('sequelize')
-const Database = require('../Database')
-
-const getLibraryItemMinified = (libraryItemId) => {
-  return Database.libraryItemModel.findByPk(libraryItemId, {
-    include: [
-      {
-        model: Database.bookModel,
-        attributes: [
-          'id', 'title', 'subtitle', 'publishedYear', 'publishedDate', 'publisher', 'description', 'isbn', 'asin', 'language', 'explicit', 'narrators', 'coverPath', 'genres', 'tags'
-        ],
-        include: [
-          {
-            model: Database.authorModel,
-            attributes: ['id', 'name'],
-            through: {
-              attributes: []
-            }
-          },
-          {
-            model: Database.seriesModel,
-            attributes: ['id', 'name'],
-            through: {
-              attributes: ['sequence']
-            }
-          }
-        ]
-      },
-      {
-        model: Database.podcastModel,
-        attributes: [
-          'id', 'title', 'author', 'releaseDate', 'feedURL', 'imageURL', 'description', 'itunesPageURL', 'itunesId', 'itunesArtistId', 'language', 'podcastType', 'explicit', 'autoDownloadEpisodes', 'genres', 'tags',
-          [Sequelize.literal('(SELECT COUNT(*) FROM "podcastEpisodes" WHERE "podcastEpisodes"."podcastId" = podcast.id)'), 'numPodcastEpisodes']
-        ]
-      }
-    ]
-  })
-}
-
-const getLibraryItemExpanded = (libraryItemId) => {
-  return Database.libraryItemModel.findByPk(libraryItemId, {
-    include: [
-      {
-        model: Database.bookModel,
-        include: [
-          {
-            model: Database.authorModel,
-            through: {
-              attributes: []
-            }
-          },
-          {
-            model: Database.seriesModel,
-            through: {
-              attributes: ['sequence']
-            }
-          }
-        ]
-      },
-      {
-        model: Database.podcastModel,
-        include: [
-          {
-            model: Database.podcastEpisodeModel
-          }
-        ]
-      },
-      'libraryFolder',
-      'library'
-    ]
-  })
-}
-
-module.exports = {
-  getLibraryItemMinified,
-  getLibraryItemExpanded
-}
@@ -3,20 +3,13 @@ const Logger = require('../Logger')
 const Path = require('path')
 const Audnexus = require('../providers/Audnexus')

-const { downloadFile } = require('../utils/fileUtils')
+const { downloadImageFile } = require('../utils/fileUtils')

 class AuthorFinder {
   constructor() {
     this.audnexus = new Audnexus()
   }

-  async downloadImage(url, outputPath) {
-    return downloadFile(url, outputPath).then(() => true).catch((error) => {
-      Logger.error('[AuthorFinder] Failed to download author image', error)
-      return null
-    })
-  }
-
   findAuthorByASIN(asin, region) {
     if (!asin) return null
     return this.audnexus.findAuthorByASIN(asin, region)

@@ -33,28 +26,36 @@ class AuthorFinder {
     return author
   }

+  /**
+   * Download author image from url and save in authors folder
+   *
+   * @param {string} authorId
+   * @param {string} url
+   * @returns {Promise<{path:string, error:string}>}
+   */
   async saveAuthorImage(authorId, url) {
-    var authorDir = Path.join(global.MetadataPath, 'authors')
-    var relAuthorDir = Path.posix.join('/metadata', 'authors')
+    const authorDir = Path.join(global.MetadataPath, 'authors')

     if (!await fs.pathExists(authorDir)) {
       await fs.ensureDir(authorDir)
     }

-    var imageExtension = url.toLowerCase().split('.').pop()
-    var ext = imageExtension === 'png' ? 'png' : 'jpg'
-    var filename = authorId + '.' + ext
-    var outputPath = Path.posix.join(authorDir, filename)
-    var relPath = Path.posix.join(relAuthorDir, filename)
+    const imageExtension = url.toLowerCase().split('.').pop()
+    const ext = imageExtension === 'png' ? 'png' : 'jpg'
+    const filename = authorId + '.' + ext
+    const outputPath = Path.posix.join(authorDir, filename)

-    var success = await this.downloadImage(url, outputPath)
-    if (!success) {
-      return null
-    }
-    return {
-      path: outputPath,
-      relPath
-    }
+    return downloadImageFile(url, outputPath).then(() => {
+      return {
+        path: outputPath
+      }
+    }).catch((err) => {
+      let errorMsg = err.message || 'Unknown error'
+      Logger.error(`[AuthorFinder] Download image file failed for "${url}"`, errorMsg)
+      return {
+        error: errorMsg
+      }
+    })
   }
 }
 module.exports = new AuthorFinder()
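`saveAuthorImage` now resolves to a `{ path }` or `{ error }` object instead of `null`, so callers (the AuthorController hunks above) can branch on `result?.error` / `result?.path` without a try/catch. The pattern in isolation, with a hypothetical `download` helper standing in for `downloadImageFile`:

// Hypothetical downloader used only to show the result-object convention
async function download(url, outputPath) {
  if (!url.startsWith('http')) throw new Error('Invalid URL')
  // ... fetch url and write to outputPath ...
}

function saveImage(url, outputPath) {
  return download(url, outputPath)
    .then(() => ({ path: outputPath }))
    .catch((err) => ({ error: err.message || 'Unknown error' }))
}

// The caller branches on the shape and never throws:
// const result = await saveImage(url, '/metadata/authors/some-id.jpg')
// if (result.error) return res.status(400).send(result.error)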
@ -6,7 +6,7 @@ const Audnexus = require('../providers/Audnexus')
|
|||
const FantLab = require('../providers/FantLab')
|
||||
const AudiobookCovers = require('../providers/AudiobookCovers')
|
||||
const Logger = require('../Logger')
|
||||
const { levenshteinDistance } = require('../utils/index')
|
||||
const { levenshteinDistance, escapeRegExp } = require('../utils/index')
|
||||
|
||||
class BookFinder {
|
||||
constructor() {
|
||||
|
|
@ -31,47 +31,11 @@ class BookFinder {
|
|||
return book
|
||||
}
|
||||
|
||||
stripSubtitle(title) {
|
||||
if (title.includes(':')) {
|
||||
return title.split(':')[0].trim()
|
||||
} else if (title.includes(' - ')) {
|
||||
return title.split(' - ')[0].trim()
|
||||
}
|
||||
return title
|
||||
}
|
||||
|
||||
replaceAccentedChars(str) {
|
||||
try {
|
||||
return str.normalize('NFD').replace(/[\u0300-\u036f]/g, "")
|
||||
} catch (error) {
|
||||
Logger.error('[BookFinder] str normalize error', error)
|
||||
return str
|
||||
}
|
||||
}
|
||||
|
||||
cleanTitleForCompares(title) {
|
||||
if (!title) return ''
|
||||
// Remove subtitle if there (i.e. "Cool Book: Coolest Ever" becomes "Cool Book")
|
||||
let stripped = this.stripSubtitle(title)
|
||||
|
||||
// Remove text in paranthesis (i.e. "Ender's Game (Ender's Saga)" becomes "Ender's Game")
|
||||
let cleaned = stripped.replace(/ *\([^)]*\) */g, "")
|
||||
|
||||
// Remove single quotes (i.e. "Ender's Game" becomes "Enders Game")
|
||||
cleaned = cleaned.replace(/'/g, '')
|
||||
return this.replaceAccentedChars(cleaned)
|
||||
}
|
||||
|
||||
cleanAuthorForCompares(author) {
|
||||
if (!author) return ''
|
||||
return this.replaceAccentedChars(author)
|
||||
}
|
||||
|
||||
filterSearchResults(books, title, author, maxTitleDistance, maxAuthorDistance) {
|
||||
var searchTitle = this.cleanTitleForCompares(title)
|
||||
var searchAuthor = this.cleanAuthorForCompares(author)
|
||||
var searchTitle = cleanTitleForCompares(title)
|
||||
var searchAuthor = cleanAuthorForCompares(author)
|
||||
return books.map(b => {
|
||||
b.cleanedTitle = this.cleanTitleForCompares(b.title)
|
||||
b.cleanedTitle = cleanTitleForCompares(b.title)
|
||||
b.titleDistance = levenshteinDistance(b.cleanedTitle, title)
|
||||
|
||||
// Total length of search (title or both title & author)
|
||||
|
|
@ -82,7 +46,7 @@ class BookFinder {
|
|||
b.authorDistance = author.length
|
||||
} else {
|
||||
b.totalPossibleDistance += b.author.length
|
||||
b.cleanedAuthor = this.cleanAuthorForCompares(b.author)
|
||||
b.cleanedAuthor = cleanAuthorForCompares(b.author)
|
||||
|
||||
var cleanedAuthorDistance = levenshteinDistance(b.cleanedAuthor, searchAuthor)
|
||||
var authorDistance = levenshteinDistance(b.author || '', author)
|
||||
|
|
@ -136,6 +100,10 @@ class BookFinder {
|
|||
if (!booksFiltered.length && books.length) {
|
||||
if (this.verbose) Logger.debug(`Search has ${books.length} matches, but no close title matches`)
|
||||
}
|
||||
booksFiltered.sort((a, b) => {
|
||||
return a.totalDistance - b.totalDistance
|
||||
})
|
||||
|
||||
return booksFiltered
|
||||
}
|
||||
|
||||
|
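For intuition on the distance scoring above: each result's cleaned title (and author, when present) is compared against the cleaned search terms via Levenshtein distance, with per-field budgets defaulting to 4. A rough illustration only, since the full totalDistance combination is elided from this hunk:

// Sketch of the per-field distance checks feeding the filter
const titleDistance = levenshteinDistance('enders game', 'enders gam')        // 1, within maxTitleDistance
const authorDistance = levenshteinDistance('orson scott card', 'o. s. card')  // large, over maxAuthorDistance
const plausibleMatch = titleDistance <= maxTitleDistance && authorDistance <= maxAuthorDistance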
@ -179,38 +147,159 @@ class BookFinder {
    return books
  }

  addTitleCandidate(title, candidates) {
    // Main variant
    const cleanTitle = this.cleanTitleForCompares(title).trim()
    if (!cleanTitle) return
    candidates.add(cleanTitle)
  static TitleCandidates = class {

    let candidate = cleanTitle
    constructor(cleanAuthor) {
      this.candidates = new Set()
      this.cleanAuthor = cleanAuthor
      this.priorities = {}
      this.positions = {}
      this.currentPosition = 0
    }

    // Remove subtitle
    candidate = candidate.replace(/([,:;_]| by ).*/g, "").trim()
    if (candidate)
      candidates.add(candidate)
    add(title) {
      // if title contains the author, remove it
      title = this.#removeAuthorFromTitle(title)

    // Remove preceding/trailing numbers
    candidate = candidate.replace(/^\d+ | \d+$/g, "").trim()
    if (candidate)
      candidates.add(candidate)
      const titleTransformers = [
        [/([,:;_]| by ).*/g, ''], // Remove subtitle
        [/(^| )\d+k(bps)?( |$)/, ' '], // Remove bitrate
        [/ (2nd|3rd|\d+th)\s+ed(\.|ition)?/g, ''], // Remove edition
        [/(^| |\.)(m4b|m4a|mp3)( |$)/g, ''], // Remove file-type
        [/ a novel.*$/g, ''], // Remove "a novel"
        [/(^| )(un)?abridged( |$)/g, ' '], // Remove "unabridged/abridged"
        [/^\d+ | \d+$/g, ''], // Remove preceding/trailing numbers
      ]

    // Remove bitrate
    candidate = candidate.replace(/(^| )\d+k(bps)?( |$)/, " ").trim()
    if (candidate)
      candidates.add(candidate)
      // Main variant
      const cleanTitle = cleanTitleForCompares(title).trim()
      if (!cleanTitle) return
      this.candidates.add(cleanTitle)
      this.priorities[cleanTitle] = 0
      this.positions[cleanTitle] = this.currentPosition

    // Remove edition
    candidate = candidate.replace(/ (2nd|3rd|\d+th)\s+ed(\.|ition)?/, "").trim()
    if (candidate)
      candidates.add(candidate)
      let candidate = cleanTitle

      for (const transformer of titleTransformers)
        candidate = candidate.replace(transformer[0], transformer[1]).trim()

      if (candidate != cleanTitle) {
        if (candidate) {
          this.candidates.add(candidate)
          this.priorities[candidate] = 0
          this.positions[candidate] = this.currentPosition
        }
        this.priorities[cleanTitle] = 1
      }
      this.currentPosition++
    }

    get size() {
      return this.candidates.size
    }

    getCandidates() {
      var candidates = [...this.candidates]
      candidates.sort((a, b) => {
        // Candidates that include only digits are also likely low quality
        const onlyDigits = /^\d+$/
        const includesOnlyDigitsDiff = onlyDigits.test(a) - onlyDigits.test(b)
        if (includesOnlyDigitsDiff) return includesOnlyDigitsDiff
        // transformed candidates receive higher priority
        const priorityDiff = this.priorities[a] - this.priorities[b]
        if (priorityDiff) return priorityDiff
        // if same priority, prefer candidates that are closer to the beginning (e.g. titles before subtitles)
        const positionDiff = this.positions[a] - this.positions[b]
        return positionDiff // candidates with same priority always have different positions
      })
      Logger.debug(`[${this.constructor.name}] Found ${candidates.length} fuzzy title candidates`)
      Logger.debug(candidates)
      return candidates
    }

    delete(title) {
      return this.candidates.delete(title)
    }

    #removeAuthorFromTitle(title) {
      if (!this.cleanAuthor) return title
      const authorRe = new RegExp(`(^| | by |)${escapeRegExp(this.cleanAuthor)}(?= |$)`, "g")
      const authorCleanedTitle = cleanAuthorForCompares(title)
      const authorCleanedTitleWithoutAuthor = authorCleanedTitle.replace(authorRe, '')
      if (authorCleanedTitleWithoutAuthor !== authorCleanedTitle) {
        return authorCleanedTitleWithoutAuthor.trim()
      }
      return title
    }
  }
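To see what the transformer pipeline above yields in practice, here is a hypothetical run (inputs and expected outputs are inferred from the regexes, not taken from this diff):

// Hypothetical walkthrough of TitleCandidates
const tc = new BookFinder.TitleCandidates('andy weir')
tc.add('Project Hail Mary unabridged 64kbps')
// cleanTitleForCompares -> 'project hail mary unabridged 64kbps' (kept, but demoted to priority 1 once a transform applies)
// transformers strip the bitrate and 'unabridged' -> 'project hail mary' (priority 0)
tc.getCandidates() // -> ['project hail mary', 'project hail mary unabridged 64kbps']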
  static AuthorCandidates = class {
    constructor(cleanAuthor, audnexus) {
      this.audnexus = audnexus
      this.candidates = new Set()
      this.cleanAuthor = cleanAuthor
      if (cleanAuthor) this.candidates.add(cleanAuthor)
    }

    validateAuthor(name, region = '', maxLevenshtein = 2) {
      return this.audnexus.authorASINsRequest(name, region).then((asins) => {
        for (const [i, asin] of asins.entries()) {
          if (i > 10) break
          let cleanName = cleanAuthorForCompares(asin.name)
          if (!cleanName) continue
          if (cleanName.includes(name)) return name
          if (name.includes(cleanName)) return cleanName
          if (levenshteinDistance(cleanName, name) <= maxLevenshtein) return cleanName
        }
        return ''
      })
    }

    add(author) {
      const cleanAuthor = cleanAuthorForCompares(author).trim()
      if (!cleanAuthor) return
      this.candidates.add(cleanAuthor)
    }

    get size() {
      return this.candidates.size
    }

    get agressivelyCleanAuthor() {
      if (this.cleanAuthor) {
        const agressivelyCleanAuthor = this.cleanAuthor.replace(/[,/-].*$/, '').trim()
        return agressivelyCleanAuthor ? agressivelyCleanAuthor : this.cleanAuthor
      }
      return ''
    }

    async getCandidates() {
      var filteredCandidates = []
      var promises = []
      for (const candidate of this.candidates) {
        promises.push(this.validateAuthor(candidate))
      }
      const results = [...new Set(await Promise.all(promises))]
      filteredCandidates = results.filter(author => author)
      // If no valid candidates were found, add back an aggressively cleaned author version
      if (!filteredCandidates.length && this.cleanAuthor) filteredCandidates.push(this.agressivelyCleanAuthor)
      // Always add an empty author candidate
      filteredCandidates.push('')
      Logger.debug(`[${this.constructor.name}] Found ${filteredCandidates.length} fuzzy author candidates`)
      Logger.debug(filteredCandidates)
      return filteredCandidates
    }

    delete(author) {
      return this.candidates.delete(author)
    }
  }
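A quick sketch of how these author candidates get vetted (the author names are illustrative): each candidate is checked against the Audnexus ASIN lookup, near-misses within a Levenshtein distance of 2 are accepted, and an empty-author fallback is always appended so a title-only search can run last:

// Illustrative flow, assuming an audnexus provider instance
const ac = new BookFinder.AuthorCandidates('frank herbert, brian herbert', audnexus)
ac.add('frank herbert')
const authors = await ac.getCandidates()
// e.g. ['frank herbert', ''] - validated names first, '' as the final fallback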
  /**
   * Search for books including fuzzy searches
   *
   * @param {Object} libraryItem
   * @param {string} provider
   * @param {string} title
   * @param {string} author

@ -219,7 +308,7 @@ class BookFinder {
   * @param {{titleDistance:number, authorDistance:number, maxFuzzySearches:number}} options
   * @returns {Promise<Object[]>}
   */
  async search(provider, title, author, isbn, asin, options = {}) {
  async search(libraryItem, provider, title, author, isbn, asin, options = {}) {
    let books = []
    const maxTitleDistance = !isNaN(options.titleDistance) ? Number(options.titleDistance) : 4
    const maxAuthorDistance = !isNaN(options.authorDistance) ? Number(options.authorDistance) : 4

@ -232,62 +321,51 @@ class BookFinder {
    books = await this.runSearch(title, author, provider, asin, maxTitleDistance, maxAuthorDistance)

    if (!books.length && maxFuzzySearches > 0) {
      // normalize title and author
      // Normalize title and author
      title = title.trim().toLowerCase()
      author = author?.trim().toLowerCase() || ''

      // Now run up to maxFuzzySearches fuzzy searches
      let candidates = new Set()
      let cleanedAuthor = this.cleanAuthorForCompares(author)
      this.addTitleCandidate(title, candidates)
      const cleanAuthor = cleanAuthorForCompares(author)

      // remove parentheses and their contents, and replace with a separator
      const cleanTitle = title.replace(/\[.*?\]|\(.*?\)|{.*?}/g, " - ")
      // Now run up to maxFuzzySearches fuzzy searches
      let authorCandidates = new BookFinder.AuthorCandidates(cleanAuthor, this.audnexus)

      // Remove underscores and parentheses with their contents, and replace with a separator
      const cleanTitle = title.replace(/\[.*?\]|\(.*?\)|{.*?}|_/g, " - ")
      // Split title into hyphen-separated parts
      const titleParts = cleanTitle.split(/ - | -|- /)
      for (const titlePart of titleParts) {
        this.addTitleCandidate(titlePart, candidates)
      for (const titlePart of titleParts)
        authorCandidates.add(titlePart)
      authorCandidates = await authorCandidates.getCandidates()
      loop_author:
      for (const authorCandidate of authorCandidates) {
        let titleCandidates = new BookFinder.TitleCandidates(authorCandidate)
        for (const titlePart of titleParts)
          titleCandidates.add(titlePart)
        titleCandidates = titleCandidates.getCandidates()
        for (const titleCandidate of titleCandidates) {
          if (titleCandidate == title && authorCandidate == author) continue // We already tried this
          if (++numFuzzySearches > maxFuzzySearches) break loop_author
          books = await this.runSearch(titleCandidate, authorCandidate, provider, asin, maxTitleDistance, maxAuthorDistance)
          if (books.length) break loop_author
        }
      }
      // We already searched for original title
      if (author == cleanedAuthor) candidates.delete(title)
      if (candidates.size > 0) {
        candidates = [...candidates]
        candidates.sort((a, b) => {
          // Candidates that include the author are likely low quality
          const includesAuthorDiff = !b.includes(cleanedAuthor) - !a.includes(cleanedAuthor)
          if (includesAuthorDiff) return includesAuthorDiff
          // Candidates that include only digits are also likely low quality
          const onlyDigits = /^\d+$/
          const includesOnlyDigitsDiff = !onlyDigits.test(b) - !onlyDigits.test(a)
          if (includesOnlyDigitsDiff) return includesOnlyDigitsDiff
          // Start with longer candidates, as they are likely more specific
          const lengthDiff = b.length - a.length
          if (lengthDiff) return lengthDiff
          return b.localeCompare(a)
        }

    if (books.length) {
      const resultsHaveDuration = provider.startsWith('audible')
      if (resultsHaveDuration && libraryItem?.media?.duration) {
        const libraryItemDurationMinutes = libraryItem.media.duration / 60
        // If provider results have duration, sort by ascending duration difference from libraryItem
        books.sort((a, b) => {
          const aDuration = a.duration || Number.POSITIVE_INFINITY
          const bDuration = b.duration || Number.POSITIVE_INFINITY
          const aDurationDiff = Math.abs(aDuration - libraryItemDurationMinutes)
          const bDurationDiff = Math.abs(bDuration - libraryItemDurationMinutes)
          return aDurationDiff - bDurationDiff
        })
        Logger.debug(`[BookFinder] Found ${candidates.length} fuzzy title candidates`, candidates)
        for (const candidate of candidates) {
          if (++numFuzzySearches > maxFuzzySearches) return books
          books = await this.runSearch(candidate, cleanedAuthor, provider, asin, maxTitleDistance, maxAuthorDistance)
          if (books.length) break
        }
        if (!books.length) {
          // Now try searching without the author
          for (const candidate of candidates) {
            if (++numFuzzySearches > maxFuzzySearches) return books
            books = await this.runSearch(candidate, '', provider, asin, maxTitleDistance, maxAuthorDistance)
            if (books.length) break
          }
        }
      }
    }

    if (provider === 'openlibrary') {
      books.sort((a, b) => {
        return a.totalDistance - b.totalDistance
      })
    }

    return books
  }
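The fuzzy pass above is bounded: author candidates form the outer loop, title candidates the inner one, and the labelled break stops everything once maxFuzzySearches provider calls have been spent. In sketch form (titleCandidatesFor and attempts are illustrative names, not from the diff):

// Control-flow sketch of the bounded fuzzy search
for (const author of await authorCandidates.getCandidates()) {  // outer: validated authors, then ''
  for (const title of titleCandidatesFor(author)) {             // inner: best-ranked titles first
    if (++attempts > maxFuzzySearches) break                    // one global budget, not per author
    if (await runSearch(title, author)) return results          // first hit wins
  }
}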
@ -331,12 +409,12 @@ class BookFinder {

    if (provider === 'all') {
      for (const providerString of this.providers) {
        const providerResults = await this.search(providerString, title, author, options)
        const providerResults = await this.search(null, providerString, title, author, options)
        Logger.debug(`[BookFinder] Found ${providerResults.length} covers from ${providerString}`)
        searchResults.push(...providerResults)
      }
    } else {
      searchResults = await this.search(provider, title, author, options)
      searchResults = await this.search(null, provider, title, author, options)
    }
    Logger.debug(`[BookFinder] FindCovers search results: ${searchResults.length}`)
@ -357,3 +435,54 @@ class BookFinder {
  }
}
module.exports = new BookFinder()

function stripSubtitle(title) {
  if (title.includes(':')) {
    return title.split(':')[0].trim()
  } else if (title.includes(' - ')) {
    return title.split(' - ')[0].trim()
  }
  return title
}

function replaceAccentedChars(str) {
  try {
    return str.normalize('NFD').replace(/[\u0300-\u036f]/g, "")
  } catch (error) {
    Logger.error('[BookFinder] str normalize error', error)
    return str
  }
}

function cleanTitleForCompares(title) {
  if (!title) return ''
  title = stripRedundantSpaces(title)

  // Remove subtitle if there (i.e. "Cool Book: Coolest Ever" becomes "Cool Book")
  let stripped = stripSubtitle(title)

  // Remove text in parentheses (i.e. "Ender's Game (Ender's Saga)" becomes "Ender's Game")
  let cleaned = stripped.replace(/ *\([^)]*\) */g, "")

  // Remove single quotes (i.e. "Ender's Game" becomes "Enders Game")
  cleaned = cleaned.replace(/'/g, '')
  return replaceAccentedChars(cleaned).toLowerCase()
}

function cleanAuthorForCompares(author) {
  if (!author) return ''
  author = stripRedundantSpaces(author)

  let cleanAuthor = replaceAccentedChars(author).toLowerCase()
  // separate initials
  cleanAuthor = cleanAuthor.replace(/([a-z])\.([a-z])/g, '$1. $2')
  // remove middle initials
  cleanAuthor = cleanAuthor.replace(/(?<=\w\w)(\s+[a-z]\.?)+(?=\s+\w\w)/g, '')
  // remove et al.
  cleanAuthor = cleanAuthor.replace(/ et al\.?(?= |$)/g, '')
  return cleanAuthor
}

function stripRedundantSpaces(str) {
  return str.replace(/\s+/g, ' ').trim()
}
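Tracing cleanAuthorForCompares through its regexes makes the intent concrete (a worked example, not from the diff):

// Worked example of the author normalization above
cleanAuthorForCompares('Frank  P. Herbert et al.')
// stripRedundantSpaces    -> 'Frank P. Herbert et al.'
// lowercase + deaccent    -> 'frank p. herbert et al.'
// separate initials       -> (no change here)
// remove middle initials  -> 'frank herbert et al.'
// remove et al.           -> 'frank herbert'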
@ -1,9 +0,0 @@
(The MIT License)

Copyright (c) 2012 TJ Holowaychuk <tj@vision-media.ca>

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the 'Software'), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

@ -1,2 +0,0 @@
exports.parse = require('./parse');
exports.stringify = require('./stringify');
@ -1,603 +0,0 @@
// http://www.w3.org/TR/CSS21/grammar.html
// https://github.com/visionmedia/css-parse/pull/49#issuecomment-30088027
var commentre = /\/\*[^*]*\*+([^/*][^*]*\*+)*\//g

module.exports = function(css, options){
  options = options || {};

  /**
   * Positional.
   */

  var lineno = 1;
  var column = 1;

  /**
   * Update lineno and column based on `str`.
   */

  function updatePosition(str) {
    var lines = str.match(/\n/g);
    if (lines) lineno += lines.length;
    var i = str.lastIndexOf('\n');
    column = ~i ? str.length - i : column + str.length;
  }

  /**
   * Mark position and patch `node.position`.
   */

  function position() {
    var start = { line: lineno, column: column };
    return function(node){
      node.position = new Position(start);
      whitespace();
      return node;
    };
  }

  /**
   * Store position information for a node
   */

  function Position(start) {
    this.start = start;
    this.end = { line: lineno, column: column };
    this.source = options.source;
  }

  /**
   * Non-enumerable source string
   */

  Position.prototype.content = css;

  /**
   * Error `msg`.
   */

  var errorsList = [];

  function error(msg) {
    var err = new Error(options.source + ':' + lineno + ':' + column + ': ' + msg);
    err.reason = msg;
    err.filename = options.source;
    err.line = lineno;
    err.column = column;
    err.source = css;

    if (options.silent) {
      errorsList.push(err);
    } else {
      throw err;
    }
  }

  /**
   * Parse stylesheet.
   */

  function stylesheet() {
    var rulesList = rules();

    return {
      type: 'stylesheet',
      stylesheet: {
        source: options.source,
        rules: rulesList,
        parsingErrors: errorsList
      }
    };
  }

  /**
   * Opening brace.
   */

  function open() {
    return match(/^{\s*/);
  }

  /**
   * Closing brace.
   */

  function close() {
    return match(/^}/);
  }

  /**
   * Parse ruleset.
   */

  function rules() {
    var node;
    var rules = [];
    whitespace();
    comments(rules);
    while (css.length && css.charAt(0) != '}' && (node = atrule() || rule())) {
      if (node !== false) {
        rules.push(node);
        comments(rules);
      }
    }
    return rules;
  }

  /**
   * Match `re` and return captures.
   */

  function match(re) {
    var m = re.exec(css);
    if (!m) return;
    var str = m[0];
    updatePosition(str);
    css = css.slice(str.length);
    return m;
  }

  /**
   * Parse whitespace.
   */

  function whitespace() {
    match(/^\s*/);
  }

  /**
   * Parse comments;
   */

  function comments(rules) {
    var c;
    rules = rules || [];
    while (c = comment()) {
      if (c !== false) {
        rules.push(c);
      }
    }
    return rules;
  }

  /**
   * Parse comment.
   */

  function comment() {
    var pos = position();
    if ('/' != css.charAt(0) || '*' != css.charAt(1)) return;

    var i = 2;
    while ("" != css.charAt(i) && ('*' != css.charAt(i) || '/' != css.charAt(i + 1))) ++i;
    i += 2;

    if ("" === css.charAt(i-1)) {
      return error('End of comment missing');
    }

    var str = css.slice(2, i - 2);
    column += 2;
    updatePosition(str);
    css = css.slice(i);
    column += 2;

    return pos({
      type: 'comment',
      comment: str
    });
  }

  /**
   * Parse selector.
   */

  function selector() {
    var m = match(/^([^{]+)/);
    if (!m) return;
    /* @fix Remove all comments from selectors
     * http://ostermiller.org/findcomment.html */
    return trim(m[0])
      .replace(/\/\*([^*]|[\r\n]|(\*+([^*/]|[\r\n])))*\*\/+/g, '')
      .replace(/"(?:\\"|[^"])*"|'(?:\\'|[^'])*'/g, function(m) {
        return m.replace(/,/g, '\u200C');
      })
      .split(/\s*(?![^(]*\)),\s*/)
      .map(function(s) {
        return s.replace(/\u200C/g, ',');
      });
  }

  /**
   * Parse declaration.
   */

  function declaration() {
    var pos = position();

    // prop
    var prop = match(/^(\*?[-#\/\*\\\w]+(\[[0-9a-z_-]+\])?)\s*/);
    if (!prop) return;
    prop = trim(prop[0]);

    // :
    if (!match(/^:\s*/)) return error("property missing ':'");

    // val
    var val = match(/^((?:'(?:\\'|.)*?'|"(?:\\"|.)*?"|\([^\)]*?\)|[^};])+)/);

    var ret = pos({
      type: 'declaration',
      property: prop.replace(commentre, ''),
      value: val ? trim(val[0]).replace(commentre, '') : ''
    });

    // ;
    match(/^[;\s]*/);

    return ret;
  }

  /**
   * Parse declarations.
   */

  function declarations() {
    var decls = [];

    if (!open()) return error("missing '{'");
    comments(decls);

    // declarations
    var decl;
    while (decl = declaration()) {
      if (decl !== false) {
        decls.push(decl);
        comments(decls);
      }
    }

    if (!close()) return error("missing '}'");
    return decls;
  }

  /**
   * Parse keyframe.
   */

  function keyframe() {
    var m;
    var vals = [];
    var pos = position();

    while (m = match(/^((\d+\.\d+|\.\d+|\d+)%?|[a-z]+)\s*/)) {
      vals.push(m[1]);
      match(/^,\s*/);
    }

    if (!vals.length) return;

    return pos({
      type: 'keyframe',
      values: vals,
      declarations: declarations()
    });
  }

  /**
   * Parse keyframes.
   */

  function atkeyframes() {
    var pos = position();
    var m = match(/^@([-\w]+)?keyframes\s*/);

    if (!m) return;
    var vendor = m[1];

    // identifier
    var m = match(/^([-\w]+)\s*/);
    if (!m) return error("@keyframes missing name");
    var name = m[1];

    if (!open()) return error("@keyframes missing '{'");

    var frame;
    var frames = comments();
    while (frame = keyframe()) {
      frames.push(frame);
      frames = frames.concat(comments());
    }

    if (!close()) return error("@keyframes missing '}'");

    return pos({
      type: 'keyframes',
      name: name,
      vendor: vendor,
      keyframes: frames
    });
  }

  /**
   * Parse supports.
   */

  function atsupports() {
    var pos = position();
    var m = match(/^@supports *([^{]+)/);

    if (!m) return;
    var supports = trim(m[1]);

    if (!open()) return error("@supports missing '{'");

    var style = comments().concat(rules());

    if (!close()) return error("@supports missing '}'");

    return pos({
      type: 'supports',
      supports: supports,
      rules: style
    });
  }

  /**
   * Parse host.
   */

  function athost() {
    var pos = position();
    var m = match(/^@host\s*/);

    if (!m) return;

    if (!open()) return error("@host missing '{'");

    var style = comments().concat(rules());

    if (!close()) return error("@host missing '}'");

    return pos({
      type: 'host',
      rules: style
    });
  }

  /**
   * Parse media.
   */

  function atmedia() {
    var pos = position();
    var m = match(/^@media *([^{]+)/);

    if (!m) return;
    var media = trim(m[1]);

    if (!open()) return error("@media missing '{'");

    var style = comments().concat(rules());

    if (!close()) return error("@media missing '}'");

    return pos({
      type: 'media',
      media: media,
      rules: style
    });
  }

  /**
   * Parse custom-media.
   */

  function atcustommedia() {
    var pos = position();
    var m = match(/^@custom-media\s+(--[^\s]+)\s*([^{;]+);/);
    if (!m) return;

    return pos({
      type: 'custom-media',
      name: trim(m[1]),
      media: trim(m[2])
    });
  }

  /**
   * Parse paged media.
   */

  function atpage() {
    var pos = position();
    var m = match(/^@page */);
    if (!m) return;

    var sel = selector() || [];

    if (!open()) return error("@page missing '{'");
    var decls = comments();

    // declarations
    var decl;
    while (decl = declaration()) {
      decls.push(decl);
      decls = decls.concat(comments());
    }

    if (!close()) return error("@page missing '}'");

    return pos({
      type: 'page',
      selectors: sel,
      declarations: decls
    });
  }

  /**
   * Parse document.
   */

  function atdocument() {
    var pos = position();
    var m = match(/^@([-\w]+)?document *([^{]+)/);
    if (!m) return;

    var vendor = trim(m[1]);
    var doc = trim(m[2]);

    if (!open()) return error("@document missing '{'");

    var style = comments().concat(rules());

    if (!close()) return error("@document missing '}'");

    return pos({
      type: 'document',
      document: doc,
      vendor: vendor,
      rules: style
    });
  }

  /**
   * Parse font-face.
   */

  function atfontface() {
    var pos = position();
    var m = match(/^@font-face\s*/);
    if (!m) return;

    if (!open()) return error("@font-face missing '{'");
    var decls = comments();

    // declarations
    var decl;
    while (decl = declaration()) {
      decls.push(decl);
      decls = decls.concat(comments());
    }

    if (!close()) return error("@font-face missing '}'");

    return pos({
      type: 'font-face',
      declarations: decls
    });
  }

  /**
   * Parse import
   */

  var atimport = _compileAtrule('import');

  /**
   * Parse charset
   */

  var atcharset = _compileAtrule('charset');

  /**
   * Parse namespace
   */

  var atnamespace = _compileAtrule('namespace');

  /**
   * Parse non-block at-rules
   */

  function _compileAtrule(name) {
    var re = new RegExp('^@' + name + '\\s*([^;]+);');
    return function() {
      var pos = position();
      var m = match(re);
      if (!m) return;
      var ret = { type: name };
      ret[name] = m[1].trim();
      return pos(ret);
    }
  }

  /**
   * Parse at rule.
   */

  function atrule() {
    if (css[0] != '@') return;

    return atkeyframes()
      || atmedia()
      || atcustommedia()
      || atsupports()
      || atimport()
      || atcharset()
      || atnamespace()
      || atdocument()
      || atpage()
      || athost()
      || atfontface();
  }

  /**
   * Parse rule.
   */

  function rule() {
    var pos = position();
    var sel = selector();

    if (!sel) return error('selector missing');
    comments();

    return pos({
      type: 'rule',
      selectors: sel,
      declarations: declarations()
    });
  }

  return addParent(stylesheet());
};

/**
 * Trim `str`.
 */

function trim(str) {
  return str ? str.replace(/^\s+|\s+$/g, '') : '';
}

/**
 * Adds non-enumerable parent node reference to each node.
 */

function addParent(obj, parent) {
  var isNode = obj && typeof obj.type === 'string';
  var childParent = isNode ? obj : parent;

  for (var k in obj) {
    var value = obj[k];
    if (Array.isArray(value)) {
      value.forEach(function(v) { addParent(v, childParent); });
    } else if (value && typeof value === 'object') {
      addParent(value, childParent);
    }
  }

  if (isNode) {
    Object.defineProperty(obj, 'parent', {
      configurable: true,
      writable: true,
      enumerable: false,
      value: parent || null
    });
  }

  return obj;
}
@ -1,50 +0,0 @@

/**
 * Expose `Compiler`.
 */

module.exports = Compiler;

/**
 * Initialize a compiler.
 *
 * @param {Type} name
 * @return {Type}
 * @api public
 */

function Compiler(opts) {
  this.options = opts || {};
}

/**
 * Emit `str`
 */

Compiler.prototype.emit = function(str) {
  return str;
};

/**
 * Visit `node`.
 */

Compiler.prototype.visit = function(node){
  return this[node.type](node);
};

/**
 * Map visit over array of `nodes`, optionally using a `delim`
 */

Compiler.prototype.mapVisit = function(nodes, delim){
  var buf = '';
  delim = delim || '';

  for (var i = 0, length = nodes.length; i < length; i++) {
    buf += this.visit(nodes[i]);
    if (delim && i < length - 1) buf += this.emit(delim);
  }

  return buf;
};
@ -1,199 +0,0 @@

/**
 * Module dependencies.
 */

var Base = require('./compiler');
var inherits = require('inherits');

/**
 * Expose compiler.
 */

module.exports = Compiler;

/**
 * Initialize a new `Compiler`.
 */

function Compiler(options) {
  Base.call(this, options);
}

/**
 * Inherit from `Base.prototype`.
 */

inherits(Compiler, Base);

/**
 * Compile `node`.
 */

Compiler.prototype.compile = function(node){
  return node.stylesheet
    .rules.map(this.visit, this)
    .join('');
};

/**
 * Visit comment node.
 */

Compiler.prototype.comment = function(node){
  return this.emit('', node.position);
};

/**
 * Visit import node.
 */

Compiler.prototype.import = function(node){
  return this.emit('@import ' + node.import + ';', node.position);
};

/**
 * Visit media node.
 */

Compiler.prototype.media = function(node){
  return this.emit('@media ' + node.media, node.position)
    + this.emit('{')
    + this.mapVisit(node.rules)
    + this.emit('}');
};

/**
 * Visit document node.
 */

Compiler.prototype.document = function(node){
  var doc = '@' + (node.vendor || '') + 'document ' + node.document;

  return this.emit(doc, node.position)
    + this.emit('{')
    + this.mapVisit(node.rules)
    + this.emit('}');
};

/**
 * Visit charset node.
 */

Compiler.prototype.charset = function(node){
  return this.emit('@charset ' + node.charset + ';', node.position);
};

/**
 * Visit namespace node.
 */

Compiler.prototype.namespace = function(node){
  return this.emit('@namespace ' + node.namespace + ';', node.position);
};

/**
 * Visit supports node.
 */

Compiler.prototype.supports = function(node){
  return this.emit('@supports ' + node.supports, node.position)
    + this.emit('{')
    + this.mapVisit(node.rules)
    + this.emit('}');
};

/**
 * Visit keyframes node.
 */

Compiler.prototype.keyframes = function(node){
  return this.emit('@'
    + (node.vendor || '')
    + 'keyframes '
    + node.name, node.position)
    + this.emit('{')
    + this.mapVisit(node.keyframes)
    + this.emit('}');
};

/**
 * Visit keyframe node.
 */

Compiler.prototype.keyframe = function(node){
  var decls = node.declarations;

  return this.emit(node.values.join(','), node.position)
    + this.emit('{')
    + this.mapVisit(decls)
    + this.emit('}');
};

/**
 * Visit page node.
 */

Compiler.prototype.page = function(node){
  var sel = node.selectors.length
    ? node.selectors.join(', ')
    : '';

  return this.emit('@page ' + sel, node.position)
    + this.emit('{')
    + this.mapVisit(node.declarations)
    + this.emit('}');
};

/**
 * Visit font-face node.
 */

Compiler.prototype['font-face'] = function(node){
  return this.emit('@font-face', node.position)
    + this.emit('{')
    + this.mapVisit(node.declarations)
    + this.emit('}');
};

/**
 * Visit host node.
 */

Compiler.prototype.host = function(node){
  return this.emit('@host', node.position)
    + this.emit('{')
    + this.mapVisit(node.rules)
    + this.emit('}');
};

/**
 * Visit custom-media node.
 */

Compiler.prototype['custom-media'] = function(node){
  return this.emit('@custom-media ' + node.name + ' ' + node.media + ';', node.position);
};

/**
 * Visit rule node.
 */

Compiler.prototype.rule = function(node){
  var decls = node.declarations;
  if (!decls.length) return '';

  return this.emit(node.selectors.join(','), node.position)
    + this.emit('{')
    + this.mapVisit(decls)
    + this.emit('}');
};

/**
 * Visit declaration node.
 */

Compiler.prototype.declaration = function(node){
  return this.emit(node.property + ':' + node.value, node.position) + this.emit(';');
};
@ -1,254 +0,0 @@

/**
 * Module dependencies.
 */

var Base = require('./compiler');
var inherits = require('inherits');

/**
 * Expose compiler.
 */

module.exports = Compiler;

/**
 * Initialize a new `Compiler`.
 */

function Compiler(options) {
  options = options || {};
  Base.call(this, options);
  this.indentation = typeof options.indent === 'string' ? options.indent : '  ';
}

/**
 * Inherit from `Base.prototype`.
 */

inherits(Compiler, Base);

/**
 * Compile `node`.
 */

Compiler.prototype.compile = function(node){
  return this.stylesheet(node);
};

/**
 * Visit stylesheet node.
 */

Compiler.prototype.stylesheet = function(node){
  return this.mapVisit(node.stylesheet.rules, '\n\n');
};

/**
 * Visit comment node.
 */

Compiler.prototype.comment = function(node){
  return this.emit(this.indent() + '/*' + node.comment + '*/', node.position);
};

/**
 * Visit import node.
 */

Compiler.prototype.import = function(node){
  return this.emit('@import ' + node.import + ';', node.position);
};

/**
 * Visit media node.
 */

Compiler.prototype.media = function(node){
  return this.emit('@media ' + node.media, node.position)
    + this.emit(
        ' {\n'
        + this.indent(1))
    + this.mapVisit(node.rules, '\n\n')
    + this.emit(
        this.indent(-1)
        + '\n}');
};

/**
 * Visit document node.
 */

Compiler.prototype.document = function(node){
  var doc = '@' + (node.vendor || '') + 'document ' + node.document;

  return this.emit(doc, node.position)
    + this.emit(
        ' '
      + ' {\n'
      + this.indent(1))
    + this.mapVisit(node.rules, '\n\n')
    + this.emit(
        this.indent(-1)
        + '\n}');
};

/**
 * Visit charset node.
 */

Compiler.prototype.charset = function(node){
  return this.emit('@charset ' + node.charset + ';', node.position);
};

/**
 * Visit namespace node.
 */

Compiler.prototype.namespace = function(node){
  return this.emit('@namespace ' + node.namespace + ';', node.position);
};

/**
 * Visit supports node.
 */

Compiler.prototype.supports = function(node){
  return this.emit('@supports ' + node.supports, node.position)
    + this.emit(
        ' {\n'
        + this.indent(1))
    + this.mapVisit(node.rules, '\n\n')
    + this.emit(
        this.indent(-1)
        + '\n}');
};

/**
 * Visit keyframes node.
 */

Compiler.prototype.keyframes = function(node){
  return this.emit('@' + (node.vendor || '') + 'keyframes ' + node.name, node.position)
    + this.emit(
        ' {\n'
        + this.indent(1))
    + this.mapVisit(node.keyframes, '\n')
    + this.emit(
        this.indent(-1)
        + '}');
};

/**
 * Visit keyframe node.
 */

Compiler.prototype.keyframe = function(node){
  var decls = node.declarations;

  return this.emit(this.indent())
    + this.emit(node.values.join(', '), node.position)
    + this.emit(
        ' {\n'
        + this.indent(1))
    + this.mapVisit(decls, '\n')
    + this.emit(
        this.indent(-1)
        + '\n'
        + this.indent() + '}\n');
};

/**
 * Visit page node.
 */

Compiler.prototype.page = function(node){
  var sel = node.selectors.length
    ? node.selectors.join(', ') + ' '
    : '';

  return this.emit('@page ' + sel, node.position)
    + this.emit('{\n')
    + this.emit(this.indent(1))
    + this.mapVisit(node.declarations, '\n')
    + this.emit(this.indent(-1))
    + this.emit('\n}');
};

/**
 * Visit font-face node.
 */

Compiler.prototype['font-face'] = function(node){
  return this.emit('@font-face ', node.position)
    + this.emit('{\n')
    + this.emit(this.indent(1))
    + this.mapVisit(node.declarations, '\n')
    + this.emit(this.indent(-1))
    + this.emit('\n}');
};

/**
 * Visit host node.
 */

Compiler.prototype.host = function(node){
  return this.emit('@host', node.position)
    + this.emit(
        ' {\n'
        + this.indent(1))
    + this.mapVisit(node.rules, '\n\n')
    + this.emit(
        this.indent(-1)
        + '\n}');
};

/**
 * Visit custom-media node.
 */

Compiler.prototype['custom-media'] = function(node){
  return this.emit('@custom-media ' + node.name + ' ' + node.media + ';', node.position);
};

/**
 * Visit rule node.
 */

Compiler.prototype.rule = function(node){
  var indent = this.indent();
  var decls = node.declarations;
  if (!decls.length) return '';

  return this.emit(node.selectors.map(function(s){ return indent + s }).join(',\n'), node.position)
    + this.emit(' {\n')
    + this.emit(this.indent(1))
    + this.mapVisit(decls, '\n')
    + this.emit(this.indent(-1))
    + this.emit('\n' + this.indent() + '}');
};

/**
 * Visit declaration node.
 */

Compiler.prototype.declaration = function(node){
  return this.emit(this.indent())
    + this.emit(node.property + ': ' + node.value, node.position)
    + this.emit(';');
};

/**
 * Increase, decrease or return current indentation.
 */

Compiler.prototype.indent = function(level) {
  this.level = this.level || 1;

  if (null != level) {
    this.level += level;
    return '';
  }

  return Array(this.level).join(this.indentation);
};
@ -1,47 +0,0 @@

/**
 * Module dependencies.
 */

var Compressed = require('./compress');
var Identity = require('./identity');

/**
 * Stringify the given AST `node`.
 *
 * Options:
 *
 *  - `compress` space-optimized output
 *  - `sourcemap` return an object with `.code` and `.map`
 *
 * @param {Object} node
 * @param {Object} [options]
 * @return {String}
 * @api public
 */

module.exports = function(node, options){
  options = options || {};

  var compiler = options.compress
    ? new Compressed(options)
    : new Identity(options);

  // source maps
  if (options.sourcemap) {
    var sourcemaps = require('./source-map-support');
    sourcemaps(compiler);

    var code = compiler.compile(node);
    compiler.applySourceMaps();

    var map = options.sourcemap === 'generator'
      ? compiler.map
      : compiler.map.toJSON();

    return { code: code, map: map };
  }

  var code = compiler.compile(node);
  return code;
};
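For reference, the removed library's round trip worked like this (a sketch against the old vendored API, which no longer exists after this commit):

// Round trip with the removed vendored css lib (historical reference only)
const css = require('../libs/css')                    // old path, deleted in this commit
const ast = css.parse('body { color: red; }', { source: 'in.css' })
const pretty = css.stringify(ast)                     // identity compiler output
const tiny = css.stringify(ast, { compress: true })   // 'body{color:red;}'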
@ -1,133 +0,0 @@

/**
 * Module dependencies.
 */

var SourceMap = require('source-map').SourceMapGenerator;
var SourceMapConsumer = require('source-map').SourceMapConsumer;
var sourceMapResolve = require('source-map-resolve');
var fs = require('fs');
var path = require('path');

/**
 * Expose `mixin()`.
 */

module.exports = mixin;

/**
 * Ensure Windows-style paths are formatted properly
 */

const makeFriendlyPath = function(aPath) {
  return path.sep === "\\" ? aPath.replace(/\\/g, "/").replace(/^[a-z]:\/?/i, "/") : aPath;
}

/**
 * Mixin source map support into `compiler`.
 *
 * @param {Compiler} compiler
 * @api public
 */

function mixin(compiler) {
  compiler._comment = compiler.comment;
  compiler.map = new SourceMap();
  compiler.position = { line: 1, column: 1 };
  compiler.files = {};
  for (var k in exports) compiler[k] = exports[k];
}

/**
 * Update position.
 *
 * @param {String} str
 * @api private
 */

exports.updatePosition = function(str) {
  var lines = str.match(/\n/g);
  if (lines) this.position.line += lines.length;
  var i = str.lastIndexOf('\n');
  this.position.column = ~i ? str.length - i : this.position.column + str.length;
};

/**
 * Emit `str`.
 *
 * @param {String} str
 * @param {Object} [pos]
 * @return {String}
 * @api private
 */

exports.emit = function(str, pos) {
  if (pos) {
    var sourceFile = makeFriendlyPath(pos.source || 'source.css');

    this.map.addMapping({
      source: sourceFile,
      generated: {
        line: this.position.line,
        column: Math.max(this.position.column - 1, 0)
      },
      original: {
        line: pos.start.line,
        column: pos.start.column - 1
      }
    });

    this.addFile(sourceFile, pos);
  }

  this.updatePosition(str);

  return str;
};

/**
 * Adds a file to the source map output if it has not already been added
 * @param {String} file
 * @param {Object} pos
 */

exports.addFile = function(file, pos) {
  if (typeof pos.content !== 'string') return;
  if (Object.prototype.hasOwnProperty.call(this.files, file)) return;

  this.files[file] = pos.content;
};

/**
 * Applies any original source maps to the output and embeds the source file
 * contents in the source map.
 */

exports.applySourceMaps = function() {
  Object.keys(this.files).forEach(function(file) {
    var content = this.files[file];
    this.map.setSourceContent(file, content);

    if (this.options.inputSourcemaps !== false) {
      var originalMap = sourceMapResolve.resolveSync(
        content, file, fs.readFileSync);
      if (originalMap) {
        var map = new SourceMapConsumer(originalMap.map);
        var relativeTo = originalMap.sourcesRelativeTo;
        this.map.applySourceMap(map, file, makeFriendlyPath(path.dirname(relativeTo)));
      }
    }
  }, this);
};

/**
 * Process comments, drops sourceMap comments.
 * @param {Object} node
 */

exports.comment = function(node) {
  if (/^# sourceMappingURL=/.test(node.comment))
    return this.emit('', node.position);
  else
    return this._comment(node);
};
315 server/libs/ffbinaries/index.js (new file)

@ -0,0 +1,315 @@
const os = require('os')
const path = require('path')
const axios = require('axios')
const fse = require('../fsExtra')
const async = require('../async')
const StreamZip = require('../nodeStreamZip')
const { finished } = require('stream/promises')

var API_URL = 'https://ffbinaries.com/api/v1'

var RUNTIME_CACHE = {}
var errorMsgs = {
  connectionIssues: 'Couldn\'t connect to ffbinaries.com API. Check your Internet connection.',
  parsingVersionData: 'Couldn\'t parse retrieved version data.',
  parsingVersionList: 'Couldn\'t parse the list of available versions.',
  notFound: 'Requested data not found.',
  incorrectVersionParam: '"version" parameter must be a string.'
}

function ensureDirSync(dir) {
  try {
    fse.accessSync(dir)
  } catch (e) {
    fse.mkdirSync(dir)
  }
}

/**
 * Resolves the platform key based on input string
 */
function resolvePlatform(input) {
  var rtn = null

  switch (input) {
    case 'mac':
    case 'osx':
    case 'mac-64':
    case 'osx-64':
      rtn = 'osx-64'
      break

    case 'linux':
    case 'linux-32':
      rtn = 'linux-32'
      break

    case 'linux-64':
      rtn = 'linux-64'
      break

    case 'linux-arm':
    case 'linux-armel':
      rtn = 'linux-armel'
      break

    case 'linux-armhf':
      rtn = 'linux-armhf'
      break

    case 'win':
    case 'win-32':
    case 'windows':
    case 'windows-32':
      rtn = 'windows-32'
      break

    case 'win-64':
    case 'windows-64':
      rtn = 'windows-64'
      break

    default:
      rtn = null
  }

  return rtn
}
/**
 * Detects the platform of the machine the script is executed on.
 * Object can be provided to detect platform from info derived elsewhere.
 *
 * @param {object} osinfo Contains "type" and "arch" properties
 */
function detectPlatform(osinfo) {
  var inputIsValid = typeof osinfo === 'object' && typeof osinfo.type === 'string' && typeof osinfo.arch === 'string'
  var type = (inputIsValid ? osinfo.type : os.type()).toLowerCase()
  var arch = (inputIsValid ? osinfo.arch : os.arch()).toLowerCase()

  if (type === 'darwin') {
    return 'osx-64'
  }

  if (type === 'windows_nt') {
    return arch === 'x64' ? 'windows-64' : 'windows-32'
  }

  if (type === 'linux') {
    if (arch === 'arm' || arch === 'arm64') {
      return 'linux-armel'
    }
    return arch === 'x64' ? 'linux-64' : 'linux-32'
  }

  return null
}
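A quick sanity check of the platform mapping (values follow directly from the switch and the os.type()/os.arch() branches above):

// Examples of platform resolution
resolvePlatform('mac')                            // 'osx-64'
resolvePlatform('win-64')                         // 'windows-64'
detectPlatform({ type: 'Linux', arch: 'arm64' })  // 'linux-armel'
detectPlatform()                                  // derived from os.type() and os.arch()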
/**
 * Gets the binary filename (appends exe in Windows)
 *
 * @param {string} component "ffmpeg", "ffplay", "ffprobe" or "ffserver"
 * @param {string} platform platform code, e.g. "windows-64"
 */
function getBinaryFilename(component, platform) {
  var platformCode = resolvePlatform(platform)
  if (platformCode === 'windows-32' || platformCode === 'windows-64') {
    return component + '.exe'
  }
  return component
}

function listPlatforms() {
  return ['osx-64', 'linux-32', 'linux-64', 'linux-armel', 'linux-armhf', 'windows-32', 'windows-64']
}

/**
 *
 * @returns {Promise<string[]>} array of version strings
 */
function listVersions() {
  if (RUNTIME_CACHE.versionsAll) {
    return RUNTIME_CACHE.versionsAll
  }
  return axios.get(API_URL).then((res) => {
    if (!res.data?.versions || !Object.keys(res.data.versions)?.length) {
      throw new Error(errorMsgs.parsingVersionList)
    }
    const versionKeys = Object.keys(res.data.versions)
    RUNTIME_CACHE.versionsAll = versionKeys
    return versionKeys
  })
}
/**
 * Gets full data set from ffbinaries.com
 */
function getVersionData(version) {
  if (RUNTIME_CACHE[version]) {
    return RUNTIME_CACHE[version]
  }

  if (version && typeof version !== 'string') {
    throw new Error(errorMsgs.incorrectVersionParam)
  }

  var url = version ? '/version/' + version : '/latest'

  return axios.get(`${API_URL}${url}`).then((res) => {
    RUNTIME_CACHE[version] = res.data
    return res.data
  }).catch((error) => {
    if (error.response?.status == 404) {
      throw new Error(errorMsgs.notFound)
    } else {
      throw new Error(errorMsgs.connectionIssues)
    }
  })
}
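The two API helpers compose naturally (a sketch; the exact response shape comes from ffbinaries.com and is memoized in RUNTIME_CACHE):

// Sketch: pick a published version, then fetch its per-platform download URLs
const versions = await listVersions()                 // e.g. ['3.2', '4.1', ...]
const data = await getVersionData(versions[versions.length - 1])
const urls = data?.bin?.['linux-64']                  // { ffmpeg: 'https://...', ffprobe: 'https://...' }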
|
||||
/**
|
||||
* Download file(s) and save them in the specified directory
|
||||
*/
|
||||
async function downloadUrls(components, urls, opts) {
|
||||
const destinationDir = opts.destination
|
||||
const results = []
|
||||
const remappedUrls = []
|
||||
|
||||
if (components && !Array.isArray(components)) {
|
||||
components = [components]
|
||||
} else if (!components || !Array.isArray(components)) {
|
||||
components = []
|
||||
}
|
||||
|
||||
// returns an array of objects like this: {component: 'ffmpeg', url: 'https://...'}
|
||||
if (typeof urls === 'object') {
|
||||
for (const key in urls) {
|
||||
if (components.includes(key) && urls[key]) {
|
||||
remappedUrls.push({
|
||||
component: key,
|
||||
url: urls[key]
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
async function extractZipToDestination(zipFilename) {
|
||||
const oldpath = path.join(destinationDir, zipFilename)
|
||||
const zip = new StreamZip.async({ file: oldpath })
|
||||
const count = await zip.extract(null, destinationDir)
|
||||
await zip.close()
|
||||
}
|
||||
|
||||
|
||||
await async.each(remappedUrls, async function (urlObject) {
|
||||
try {
|
||||
const url = urlObject.url
|
||||
|
||||
const zipFilename = url.split('/').pop()
|
||||
const binFilenameBase = urlObject.component
|
||||
const binFilename = getBinaryFilename(binFilenameBase, opts.platform || detectPlatform())
|
||||
|
||||
let runningTotal = 0
|
||||
let totalFilesize
|
||||
let interval
|
||||
|
||||
|
||||
if (typeof opts.tickerFn === 'function') {
|
||||
opts.tickerInterval = parseInt(opts.tickerInterval, 10)
|
||||
const tickerInterval = (!Number.isNaN(opts.tickerInterval)) ? opts.tickerInterval : 1000
|
||||
const tickData = { filename: zipFilename, progress: 0 }
|
||||
|
||||
// Schedule next ticks
|
||||
interval = setInterval(function () {
|
||||
if (totalFilesize && runningTotal == totalFilesize) {
|
||||
return clearInterval(interval)
|
||||
}
|
||||
tickData.progress = totalFilesize > -1 ? runningTotal / totalFilesize : 0
|
||||
|
||||
opts.tickerFn(tickData)
|
||||
}, tickerInterval)
|
||||
}
|
||||
|
||||
|
||||
// Check if file already exists in target directory
|
||||
const binPath = path.join(destinationDir, binFilename)
|
||||
if (!opts.force && await fse.pathExists(binPath)) {
|
||||
// if the accessSync method doesn't throw we know the binary already exists
|
||||
results.push({
|
||||
filename: binFilename,
|
||||
path: destinationDir,
|
||||
status: 'File exists',
|
||||
code: 'FILE_EXISTS'
|
||||
})
|
||||
clearInterval(interval)
|
||||
return
|
||||
}
|
||||
|
||||
if (opts.quiet) clearInterval(interval)
|
||||
|
||||
const zipPath = path.join(destinationDir, zipFilename)
|
||||
const zipFileTempName = zipPath + '.part'
|
||||
const zipFileFinalName = zipPath
|
||||
|
||||
const response = await axios({
|
||||
url,
|
||||
method: 'GET',
|
||||
responseType: 'stream'
|
||||
})
|
||||
totalFilesize = response.headers?.['content-length'] || []
|
||||

      const writer = fse.createWriteStream(zipFileTempName)
      response.data.on('data', (chunk) => {
        runningTotal += chunk.length
      })
      response.data.pipe(writer)
      await finished(writer)
      await fse.rename(zipFileTempName, zipFileFinalName)
      await extractZipToDestination(zipFilename)
      await fse.remove(zipFileFinalName)

      results.push({
        filename: binFilename,
        path: destinationDir,
        size: Math.floor(totalFilesize / 1024 / 1024 * 1000) / 1000 + 'MB',
        status: 'File extracted to destination (downloaded from "' + url + '")',
        code: 'DONE_CLEAN'
      })
    } catch (err) {
      console.error(`Failed to download or extract file for component: ${urlObject.component}`, err)
    }
  })

  return results
}

/**
 * Gets binaries for the platform
 * It will get the data from ffbinaries, pick the correct files
 * and save it to the specified directory
 *
 * @param {Array} components
 * @param {Object} [opts]
 */
async function downloadBinaries(components, opts = {}) {
  var platform = resolvePlatform(opts.platform) || detectPlatform()

  opts.destination = path.resolve(opts.destination || '.')
  ensureDirSync(opts.destination)

  const versionData = await getVersionData(opts.version)
  const urls = versionData?.bin?.[platform]
  if (!urls) {
    throw new Error('No URLs!')
  }

  return await downloadUrls(components, urls, opts)
}

module.exports = {
  downloadBinaries: downloadBinaries,
  getVersionData: getVersionData,
  listVersions: listVersions,
  listPlatforms: listPlatforms,
  detectPlatform: detectPlatform,
  resolvePlatform: resolvePlatform,
  getBinaryFilename: getBinaryFilename
}
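
A minimal usage sketch for the module above (not part of the commit; the option names mirror the opts read by the download functions, and the ticker callback shape matches the tickData object built above):

    const ffbinaries = require('./server/libs/ffbinaries')

    // Download ffmpeg + ffprobe for the current platform into ./bin,
    // logging progress roughly once per second via the ticker callback
    ffbinaries.downloadBinaries(['ffmpeg', 'ffprobe'], {
      destination: './bin',
      tickerFn: ({ filename, progress }) => console.log(`${filename}: ${(progress * 100).toFixed(0)}%`),
      tickerInterval: 1000
    }).then((results) => {
      for (const result of results) console.log(result.code, result.filename)
    })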
20  server/libs/passportLocal/LICENSE  Normal file
@@ -0,0 +1,20 @@
The MIT License (MIT)

Copyright (c) 2011-2014 Jared Hanson

Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
20  server/libs/passportLocal/index.js  Normal file
@@ -0,0 +1,20 @@
//
// modified for audiobookshelf
// Source: https://github.com/jaredhanson/passport-local
//

/**
 * Module dependencies.
 */
var Strategy = require('./strategy');


/**
 * Expose `Strategy` directly from package.
 */
exports = module.exports = Strategy;

/**
 * Export constructors.
 */
exports.Strategy = Strategy;
119  server/libs/passportLocal/strategy.js  Normal file
@@ -0,0 +1,119 @@
/**
 * Module dependencies.
 */
const passport = require('passport-strategy')
const util = require('util')


function lookup(obj, field) {
  if (!obj) { return null; }
  var chain = field.split(']').join('').split('[');
  for (var i = 0, len = chain.length; i < len; i++) {
    var prop = obj[chain[i]];
    if (typeof (prop) === 'undefined') { return null; }
    if (typeof (prop) !== 'object') { return prop; }
    obj = prop;
  }
  return null;
}

/**
 * `Strategy` constructor.
 *
 * The local authentication strategy authenticates requests based on the
 * credentials submitted through an HTML-based login form.
 *
 * Applications must supply a `verify` callback which accepts `username` and
 * `password` credentials, and then calls the `done` callback supplying a
 * `user`, which should be set to `false` if the credentials are not valid.
 * If an exception occurred, `err` should be set.
 *
 * Optionally, `options` can be used to change the fields in which the
 * credentials are found.
 *
 * Options:
 *   - `usernameField`  field name where the username is found, defaults to _username_
 *   - `passwordField`  field name where the password is found, defaults to _password_
 *   - `passReqToCallback`  when `true`, `req` is the first argument to the verify callback (default: `false`)
 *
 * Examples:
 *
 *     passport.use(new LocalStrategy(
 *       function(username, password, done) {
 *         User.findOne({ username: username, password: password }, function (err, user) {
 *           done(err, user);
 *         });
 *       }
 *     ));
 *
 * @param {Object} options
 * @param {Function} verify
 * @api public
 */
function Strategy(options, verify) {
  if (typeof options == 'function') {
    verify = options;
    options = {};
  }
  if (!verify) { throw new TypeError('LocalStrategy requires a verify callback'); }

  this._usernameField = options.usernameField || 'username';
  this._passwordField = options.passwordField || 'password';

  passport.Strategy.call(this);
  this.name = 'local';
  this._verify = verify;
  this._passReqToCallback = options.passReqToCallback;
}

/**
 * Inherit from `passport.Strategy`.
 */
util.inherits(Strategy, passport.Strategy);

/**
 * Authenticate request based on the contents of a form submission.
 *
 * @param {Object} req
 * @api protected
 */
Strategy.prototype.authenticate = function (req, options) {
  options = options || {};
  var username = lookup(req.body, this._usernameField)
  if (username === null) {
    username = lookup(req.query, this._usernameField);
  }

  var password = lookup(req.body, this._passwordField)
  if (password === null) {
    password = lookup(req.query, this._passwordField);
  }

  if (username === null || password === null) {
    return this.fail({ message: options.badRequestMessage || 'Missing credentials' }, 400);
  }

  var self = this;

  function verified(err, user, info) {
    if (err) { return self.error(err); }
    if (!user) { return self.fail(info); }
    self.success(user, info);
  }

  try {
    if (self._passReqToCallback) {
      this._verify(req, username, password, verified);
    } else {
      this._verify(username, password, verified);
    }
  } catch (ex) {
    return self.error(ex);
  }
};


/**
 * Expose `Strategy`.
 */
module.exports = Strategy;
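
For reference, a minimal sketch of wiring this vendored strategy into passport (illustrative only; findUserByUsername and checkPassword are placeholder names, not the server's actual verify callback):

    const passport = require('passport')
    const LocalStrategy = require('./server/libs/passportLocal')

    passport.use(new LocalStrategy(async (username, password, done) => {
      try {
        // Placeholder lookup/check - substitute the app's own user store
        const user = await findUserByUsername(username)
        if (!user || !await checkPassword(user, password)) {
          return done(null, false, { message: 'Invalid username or password' })
        }
        return done(null, user)
      } catch (err) {
        return done(err)
      }
    }))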
server/managers/AbMergeManager.js
@@ -4,14 +4,13 @@ const fs = require('../libs/fsExtra')
const workerThreads = require('worker_threads')
const Logger = require('../Logger')
const TaskManager = require('./TaskManager')
const Task = require('../objects/Task')
const { writeConcatFile } = require('../utils/ffmpegHelpers')
const toneHelpers = require('../utils/toneHelpers')

class AbMergeManager {
  constructor(taskManager) {
    this.taskManager = taskManager
  constructor() {
    this.itemsCacheDir = Path.join(global.MetadataPath, 'cache/items')

    this.pendingTasks = []
@@ -45,7 +44,7 @@ class AbMergeManager
    }
    const taskDescription = `Encoding audiobook "${libraryItem.media.metadata.title}" into a single m4b file.`
    task.setData('encode-m4b', 'Encoding M4b', taskDescription, false, taskData)
    this.taskManager.addTask(task)
    TaskManager.addTask(task)
    Logger.info(`Start m4b encode for ${libraryItem.id} - TaskId: ${task.id}`)

    if (!await fs.pathExists(taskData.itemCachePath)) {
@@ -234,7 +233,7 @@ class AbMergeManager
      }
    }

    this.taskManager.taskFinished(task)
    TaskManager.taskFinished(task)
  }
}
module.exports = AbMergeManager
54  server/managers/ApiCacheManager.js  Normal file
@@ -0,0 +1,54 @@
const { LRUCache } = require('lru-cache')
const Logger = require('../Logger')
const Database = require('../Database')

class ApiCacheManager {

  defaultCacheOptions = { max: 1000, maxSize: 10 * 1000 * 1000, sizeCalculation: item => (item.body.length + JSON.stringify(item.headers).length) }
  defaultTtlOptions = { ttl: 30 * 60 * 1000 }

  constructor(cache = new LRUCache(this.defaultCacheOptions), ttlOptions = this.defaultTtlOptions) {
    this.cache = cache
    this.ttlOptions = ttlOptions
  }

  init(database = Database) {
    let hooks = ['afterCreate', 'afterUpdate', 'afterDestroy', 'afterBulkCreate', 'afterBulkUpdate', 'afterBulkDestroy', 'afterUpsert']
    hooks.forEach(hook => database.sequelize.addHook(hook, (model) => this.clear(model, hook)))
  }

  clear(model, hook) {
    Logger.debug(`[ApiCacheManager] ${model.constructor.name}.${hook}: Clearing cache`)
    this.cache.clear()
  }

  get middleware() {
    return (req, res, next) => {
      const key = { user: req.user.username, url: req.url }
      const stringifiedKey = JSON.stringify(key)
      Logger.debug(`[ApiCacheManager] count: ${this.cache.size} size: ${this.cache.calculatedSize}`)
      const cached = this.cache.get(stringifiedKey)
      if (cached) {
        Logger.debug(`[ApiCacheManager] Cache hit: ${stringifiedKey}`)
        res.set(cached.headers)
        res.status(cached.statusCode)
        res.send(cached.body)
        return
      }
      res.originalSend = res.send
      res.send = (body) => {
        Logger.debug(`[ApiCacheManager] Cache miss: ${stringifiedKey}`)
        const cached = { body, headers: res.getHeaders(), statusCode: res.statusCode }
        if (key.url.search(/^\/libraries\/.*?\/personalized/) !== -1) {
          Logger.debug(`[ApiCacheManager] Caching with ${this.ttlOptions.ttl} ms TTL`)
          this.cache.set(stringifiedKey, cached, this.ttlOptions)
        } else {
          this.cache.set(stringifiedKey, cached)
        }
        res.originalSend(body)
      }
      next()
    }
  }
}
module.exports = ApiCacheManager
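
A sketch of how this middleware could be mounted (illustrative; the router and someAuthMiddleware names are assumptions, not taken from the commit):

    const express = require('express')
    const ApiCacheManager = require('./server/managers/ApiCacheManager')

    const apiCacheManager = new ApiCacheManager()
    apiCacheManager.init() // register sequelize hooks so any write clears the cache

    const router = express.Router()
    // The middleware keys the cache on req.user.username + req.url,
    // so it must run after authentication has populated req.user
    router.use(someAuthMiddleware, apiCacheManager.middleware)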
server/managers/AudioMetadataMangaer.js
@@ -7,12 +7,12 @@ const fs = require('../libs/fsExtra')
const toneHelpers = require('../utils/toneHelpers')

const TaskManager = require('./TaskManager')

const Task = require('../objects/Task')

class AudioMetadataMangaer {
  constructor(taskManager) {
    this.taskManager = taskManager
  constructor() {
    this.itemsCacheDir = Path.join(global.MetadataPath, 'cache/items')

    this.MAX_CONCURRENT_TASKS = 1
@@ -101,7 +101,7 @@ class AudioMetadataMangaer

  async runMetadataEmbed(task) {
    this.tasksRunning.push(task)
    this.taskManager.addTask(task)
    TaskManager.addTask(task)

    Logger.info(`[AudioMetadataManager] Starting metadata embed task`, task.description)

@@ -176,7 +176,7 @@ class AudioMetadataMangaer
  }

  handleTaskFinished(task) {
    this.taskManager.taskFinished(task)
    TaskManager.taskFinished(task)
    this.tasksRunning = this.tasksRunning.filter(t => t.id !== task.id)

    if (this.tasksRunning.length < this.MAX_CONCURRENT_TASKS && this.tasksQueued.length) {
74  server/managers/BinaryManager.js  Normal file
@@ -0,0 +1,74 @@
const path = require('path')
const which = require('../libs/which')
const fs = require('../libs/fsExtra')
const ffbinaries = require('../libs/ffbinaries')
const Logger = require('../Logger')
const fileUtils = require('../utils/fileUtils')

class BinaryManager {

  defaultRequiredBinaries = [
    { name: 'ffmpeg', envVariable: 'FFMPEG_PATH' },
    { name: 'ffprobe', envVariable: 'FFPROBE_PATH' }
  ]

  constructor(requiredBinaries = this.defaultRequiredBinaries) {
    this.requiredBinaries = requiredBinaries
    this.mainInstallPath = process.pkg ? path.dirname(process.execPath) : global.appRoot
    this.altInstallPath = global.ConfigPath
  }

  async init() {
    if (this.initialized) return
    const missingBinaries = await this.findRequiredBinaries()
    if (missingBinaries.length == 0) return
    await this.install(missingBinaries)
    const missingBinariesAfterInstall = await this.findRequiredBinaries()
    if (missingBinariesAfterInstall.length != 0) {
      Logger.error(`[BinaryManager] Failed to find or install required binaries: ${missingBinariesAfterInstall.join(', ')}`)
      process.exit(1)
    }
    this.initialized = true
  }

  async findRequiredBinaries() {
    const missingBinaries = []
    for (const binary of this.requiredBinaries) {
      const binaryPath = await this.findBinary(binary.name, binary.envVariable)
      if (binaryPath) {
        Logger.info(`[BinaryManager] Found ${binary.name} at ${binaryPath}`)
        if (process.env[binary.envVariable] !== binaryPath) {
          Logger.info(`[BinaryManager] Updating process.env.${binary.envVariable}`)
          process.env[binary.envVariable] = binaryPath
        }
      } else {
        Logger.info(`[BinaryManager] ${binary.name} not found`)
        missingBinaries.push(binary.name)
      }
    }
    return missingBinaries
  }

  async findBinary(name, envVariable) {
    const executable = name + (process.platform == 'win32' ? '.exe' : '')
    const defaultPath = process.env[envVariable]
    if (defaultPath && await fs.pathExists(defaultPath)) return defaultPath
    const whichPath = which.sync(executable, { nothrow: true })
    if (whichPath) return whichPath
    const mainInstallPath = path.join(this.mainInstallPath, executable)
    if (await fs.pathExists(mainInstallPath)) return mainInstallPath
    const altInstallPath = path.join(this.altInstallPath, executable)
    if (await fs.pathExists(altInstallPath)) return altInstallPath
    return null
  }

  async install(binaries) {
    if (binaries.length == 0) return
    Logger.info(`[BinaryManager] Installing binaries: ${binaries.join(', ')}`)
    let destination = await fileUtils.isWritable(this.mainInstallPath) ? this.mainInstallPath : this.altInstallPath
    await ffbinaries.downloadBinaries(binaries, { destination })
    Logger.info(`[BinaryManager] Binaries installed to ${destination}`)
  }
}

module.exports = BinaryManager
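
A short sketch of the intended lookup order during startup (illustrative; globals such as global.appRoot and global.ConfigPath are set elsewhere in the server and are assumed here):

    const BinaryManager = require('./server/managers/BinaryManager')

    // For each required binary, findBinary checks in order:
    //   1. the env variable path (FFMPEG_PATH / FFPROBE_PATH)
    //   2. the system PATH via which
    //   3. the main install dir (next to the packaged executable, or appRoot)
    //   4. the alt install dir (the config path)
    // Anything still missing is downloaded via the bundled ffbinaries lib.
    const binaryManager = new BinaryManager()
    binaryManager.init().then(() => console.log('ffmpeg/ffprobe ready'))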
server/managers/CoverManager.js
@@ -5,8 +5,10 @@ const readChunk = require('../libs/readChunk')
const imageType = require('../libs/imageType')

const globals = require('../utils/globals')
const { downloadFile, filePathToPOSIX, checkPathIsFile } = require('../utils/fileUtils')
const { downloadImageFile, filePathToPOSIX, checkPathIsFile } = require('../utils/fileUtils')
const { extractCoverArt } = require('../utils/ffmpegHelpers')
const parseEbookMetadata = require('../utils/parsers/parseEbookMetadata')

const CacheManager = require('../managers/CacheManager')

class CoverManager {
@@ -120,13 +122,16 @@ class CoverManager
    await fs.ensureDir(coverDirPath)

    var temppath = Path.posix.join(coverDirPath, 'cover')
    var success = await downloadFile(url, temppath).then(() => true).catch((err) => {
      Logger.error(`[CoverManager] Download image file failed for "${url}"`, err)

    let errorMsg = ''
    let success = await downloadImageFile(url, temppath).then(() => true).catch((err) => {
      errorMsg = err.message || 'Unknown error'
      Logger.error(`[CoverManager] Download image file failed for "${url}"`, errorMsg)
      return false
    })
    if (!success) {
      return {
        error: 'Failed to download image from url'
        error: 'Failed to download image from url: ' + errorMsg
      }
    }

@@ -231,6 +236,7 @@ class CoverManager

  /**
   * Extract cover art from audio file and save for library item
   *
   * @param {import('../models/Book').AudioFileObject[]} audioFiles
   * @param {string} libraryItemId
   * @param {string} [libraryItemPath] null for isFile library items
@@ -265,6 +271,44 @@ class CoverManager
    return null
  }

  /**
   * Extract cover art from ebook and save for library item
   *
   * @param {import('../utils/parsers/parseEbookMetadata').EBookFileScanData} ebookFileScanData
   * @param {string} libraryItemId
   * @param {string} [libraryItemPath] null for isFile library items
   * @returns {Promise<string>} returns cover path
   */
  async saveEbookCoverArt(ebookFileScanData, libraryItemId, libraryItemPath) {
    if (!ebookFileScanData?.ebookCoverPath) return null

    let coverDirPath = null
    if (global.ServerSettings.storeCoverWithItem && libraryItemPath) {
      coverDirPath = libraryItemPath
    } else {
      coverDirPath = Path.posix.join(global.MetadataPath, 'items', libraryItemId)
    }
    await fs.ensureDir(coverDirPath)

    let extname = Path.extname(ebookFileScanData.ebookCoverPath) || '.jpg'
    if (extname === '.jpeg') extname = '.jpg'
    const coverFilename = `cover${extname}`
    const coverFilePath = Path.join(coverDirPath, coverFilename)

    // TODO: Overwrite if exists?
    const coverAlreadyExists = await fs.pathExists(coverFilePath)
    if (coverAlreadyExists) {
      Logger.warn(`[CoverManager] Extract embedded cover art but cover already exists for "${coverFilePath}" - overwriting`)
    }

    const success = await parseEbookMetadata.extractCoverImage(ebookFileScanData, coverFilePath)
    if (success) {
      await CacheManager.purgeCoverCache(libraryItemId)
      return coverFilePath
    }
    return null
  }

  /**
   *
   * @param {string} url
@@ -284,7 +328,7 @@ class CoverManager
    await fs.ensureDir(coverDirPath)

    const temppath = Path.posix.join(coverDirPath, 'cover')
    const success = await downloadFile(url, temppath).then(() => true).catch((err) => {
    const success = await downloadImageFile(url, temppath).then(() => true).catch((err) => {
      Logger.error(`[CoverManager] Download image file failed for "${url}"`, err)
      return false
    })
server/managers/CronManager.js
@@ -127,8 +127,7 @@
    }
  }

  async executePodcastCron(expression, libraryItemIds) {
    Logger.debug(`[CronManager] Start executing podcast cron ${expression} for ${libraryItemIds.length} item(s)`)
  async executePodcastCron(expression) {
    const podcastCron = this.podcastCrons.find(cron => cron.expression === expression)
    if (!podcastCron) {
      Logger.error(`[CronManager] Podcast cron not found for expression ${expression}`)
@@ -136,6 +135,9 @@
    }
    this.podcastCronExpressionsExecuting.push(expression)

    const libraryItemIds = podcastCron.libraryItemIds
    Logger.debug(`[CronManager] Start executing podcast cron ${expression} for ${libraryItemIds.length} item(s)`)

    // Get podcast library items to check
    const libraryItems = []
    for (const libraryItemId of libraryItemIds) {
server/managers/PlaybackSessionManager.js
@@ -330,14 +330,15 @@ class PlaybackSessionManager
    Logger.debug(`[PlaybackSessionManager] Removed session "${sessionId}"`)
  }

  // Check for streams that are not in memory and remove
  /**
   * Remove all stream folders in `/metadata/streams`
   */
  async removeOrphanStreams() {
    await fs.ensureDir(this.StreamsPath)
    try {
      const streamsInPath = await fs.readdir(this.StreamsPath)
      for (let i = 0; i < streamsInPath.length; i++) {
        const streamId = streamsInPath[i]
        if (streamId.startsWith('play_')) { // Make sure to only remove folders that are a stream
      for (const streamId of streamsInPath) {
        if (/[0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}/.test(streamId)) { // Ensure is uuidv4
          const session = this.sessions.find(se => se.id === streamId)
          if (!session) {
            const streamPath = Path.join(this.StreamsPath, streamId)
server/managers/PodcastManager.js
@@ -12,17 +12,17 @@ const opmlGenerator = require('../utils/generators/opmlGenerator')
const prober = require('../utils/prober')
const ffmpegHelpers = require('../utils/ffmpegHelpers')

const TaskManager = require('./TaskManager')

const LibraryFile = require('../objects/files/LibraryFile')
const PodcastEpisodeDownload = require('../objects/PodcastEpisodeDownload')
const PodcastEpisode = require('../objects/entities/PodcastEpisode')
const AudioFile = require('../objects/files/AudioFile')
const Task = require("../objects/Task")

class PodcastManager {
  constructor(watcher, notificationManager, taskManager) {
  constructor(watcher, notificationManager) {
    this.watcher = watcher
    this.notificationManager = notificationManager
    this.taskManager = taskManager

    this.downloadQueue = []
    this.currentDownload = null
@@ -69,14 +69,12 @@ class PodcastManager
      return
    }

    const task = new Task()
    const taskDescription = `Downloading episode "${podcastEpisodeDownload.podcastEpisode.title}".`
    const taskData = {
      libraryId: podcastEpisodeDownload.libraryId,
      libraryItemId: podcastEpisodeDownload.libraryItemId,
    }
    task.setData('download-podcast-episode', 'Downloading Episode', taskDescription, false, taskData)
    this.taskManager.addTask(task)
    const task = TaskManager.createAndAddTask('download-podcast-episode', 'Downloading Episode', taskDescription, false, taskData)

    SocketAuthority.emitter('episode_download_started', podcastEpisodeDownload.toJSONForClient())
    this.currentDownload = podcastEpisodeDownload
@@ -128,7 +126,7 @@ class PodcastManager
      this.currentDownload.setFinished(false)
    }

    this.taskManager.taskFinished(task)
    TaskManager.taskFinished(task)

    SocketAuthority.emitter('episode_download_finished', this.currentDownload.toJSONForClient())
    SocketAuthority.emitter('episode_download_queue_updated', this.getDownloadQueueDetails())
@@ -201,7 +199,7 @@ class PodcastManager
    })
    // TODO: Should we check for open playback sessions for this episode?
    // TODO: remove all user progress for this episode
    if (oldestEpisode && oldestEpisode.audioFile) {
    if (oldestEpisode?.audioFile) {
      Logger.info(`[PodcastManager] Deleting oldest episode "${oldestEpisode.title}"`)
      const successfullyDeleted = await removeFile(oldestEpisode.audioFile.metadata.path)
      if (successfullyDeleted) {
@@ -246,7 +244,7 @@ class PodcastManager
    Logger.debug(`[PodcastManager] runEpisodeCheck: "${libraryItem.media.metadata.title}" checking for episodes after ${new Date(dateToCheckForEpisodesAfter)}`)

    var newEpisodes = await this.checkPodcastForNewEpisodes(libraryItem, dateToCheckForEpisodesAfter, libraryItem.media.maxNewEpisodesToDownload)
    Logger.debug(`[PodcastManager] runEpisodeCheck: ${newEpisodes ? newEpisodes.length : 'N/A'} episodes found`)
    Logger.debug(`[PodcastManager] runEpisodeCheck: ${newEpisodes?.length || 'N/A'} episodes found`)

    if (!newEpisodes) { // Failed
      // Allow up to MaxFailedEpisodeChecks failed attempts before disabling auto download
@@ -280,14 +278,14 @@ class PodcastManager
      Logger.error(`[PodcastManager] checkPodcastForNewEpisodes no feed url for ${podcastLibraryItem.media.metadata.title} (ID: ${podcastLibraryItem.id})`)
      return false
    }
    var feed = await getPodcastFeed(podcastLibraryItem.media.metadata.feedUrl)
    if (!feed || !feed.episodes) {
    const feed = await getPodcastFeed(podcastLibraryItem.media.metadata.feedUrl)
    if (!feed?.episodes) {
      Logger.error(`[PodcastManager] checkPodcastForNewEpisodes invalid feed payload for ${podcastLibraryItem.media.metadata.title} (ID: ${podcastLibraryItem.id})`, feed)
      return false
    }

    // Filter new and not already has
    var newEpisodes = feed.episodes.filter(ep => ep.publishedAt > dateToCheckForEpisodesAfter && !podcastLibraryItem.media.checkHasEpisodeByFeedUrl(ep.enclosure.url))
    let newEpisodes = feed.episodes.filter(ep => ep.publishedAt > dateToCheckForEpisodesAfter && !podcastLibraryItem.media.checkHasEpisodeByFeedUrl(ep.enclosure.url))

    if (maxNewEpisodes > 0) {
      newEpisodes = newEpisodes.slice(0, maxNewEpisodes)
server/managers/RssFeedManager.js
@@ -103,19 +103,29 @@ class RssFeedManager
        await Database.updateFeed(feed)
      }
    } else if (feed.entityType === 'collection') {
      const collection = await Database.collectionModel.findByPk(feed.entityId)
      const collection = await Database.collectionModel.findByPk(feed.entityId, {
        include: Database.collectionBookModel
      })
      if (collection) {
        const collectionExpanded = await collection.getOldJsonExpanded()

        // Find most recently updated item in collection
        let mostRecentlyUpdatedAt = collectionExpanded.lastUpdate
        // Check for most recently updated book
        collectionExpanded.books.forEach((libraryItem) => {
          if (libraryItem.media.tracks.length && libraryItem.updatedAt > mostRecentlyUpdatedAt) {
            mostRecentlyUpdatedAt = libraryItem.updatedAt
          }
        })
        // Check for most recently added collection book
        collection.collectionBooks.forEach((collectionBook) => {
          if (collectionBook.createdAt.valueOf() > mostRecentlyUpdatedAt) {
            mostRecentlyUpdatedAt = collectionBook.createdAt.valueOf()
          }
        })
        const hasBooksRemoved = collection.collectionBooks.length < feed.episodes.length

        if (!feed.entityUpdatedAt || mostRecentlyUpdatedAt > feed.entityUpdatedAt) {
        if (!feed.entityUpdatedAt || hasBooksRemoved || mostRecentlyUpdatedAt > feed.entityUpdatedAt) {
          Logger.debug(`[RssFeedManager] Updating RSS feed for collection "${collection.name}"`)

          feed.updateFromCollection(collectionExpanded)
server/managers/TaskManager.js
@@ -1,20 +1,48 @@
const SocketAuthority = require('../SocketAuthority')
const Task = require('../objects/Task')

class TaskManager {
  constructor() {
    /** @type {Task[]} */
    this.tasks = []
  }

  /**
   * Add task and emit socket task_started event
   *
   * @param {Task} task
   */
  addTask(task) {
    this.tasks.push(task)
    SocketAuthority.emitter('task_started', task.toJSON())
  }

  /**
   * Remove task and emit task_finished event
   *
   * @param {Task} task
   */
  taskFinished(task) {
    if (this.tasks.some(t => t.id === task.id)) {
      this.tasks = this.tasks.filter(t => t.id !== task.id)
      SocketAuthority.emitter('task_finished', task.toJSON())
    }
  }

  /**
   * Create new task and add
   *
   * @param {string} action
   * @param {string} title
   * @param {string} description
   * @param {boolean} showSuccess
   * @param {Object} [data]
   */
  createAndAddTask(action, title, description, showSuccess, data = {}) {
    const task = new Task()
    task.setData(action, title, description, showSuccess, data)
    this.addTask(task)
    return task
  }
}
module.exports = TaskManager
module.exports = new TaskManager()
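
The module now exports a singleton instance rather than the class, which is why the managers above drop their injected this.taskManager and require TaskManager directly. A minimal sketch of the new call pattern (parameter values are illustrative):

    const TaskManager = require('./server/managers/TaskManager')

    // Same shared instance everywhere it is required
    const task = TaskManager.createAndAddTask('example-action', 'Example', 'An illustrative task.', false, {})
    // ... do the work ...
    TaskManager.taskFinished(task) // emits task_finished and drops it from the list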
server/models/Book.js
@@ -211,6 +211,32 @@ class Book extends Model
    }
  }

  getAbsMetadataJson() {
    return {
      tags: this.tags || [],
      chapters: this.chapters?.map(c => ({ ...c })) || [],
      title: this.title,
      subtitle: this.subtitle,
      authors: this.authors.map(a => a.name),
      narrators: this.narrators,
      series: this.series.map(se => {
        const sequence = se.bookSeries?.sequence || ''
        if (!sequence) return se.name
        return `${se.name} #${sequence}`
      }),
      genres: this.genres || [],
      publishedYear: this.publishedYear,
      publishedDate: this.publishedDate,
      publisher: this.publisher,
      description: this.description,
      isbn: this.isbn,
      asin: this.asin,
      language: this.language,
      explicit: !!this.explicit,
      abridged: !!this.abridged
    }
  }
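
For illustration, the JSON shape this method produces for a hypothetical book (all values invented):

    {
      "tags": ["favorite"],
      "chapters": [],
      "title": "Example Title",
      "subtitle": null,
      "authors": ["Example Author"],
      "narrators": ["Example Narrator"],
      "series": ["Example Series #1"],
      "genres": ["Fiction"],
      "publishedYear": "2020",
      "publishedDate": null,
      "publisher": null,
      "description": null,
      "isbn": null,
      "asin": null,
      "language": "en",
      "explicit": false,
      "abridged": false
    }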

  /**
   * Initialize model
   * @param {import('../Database').sequelize} sequelize
server/models/Feed.js
@@ -108,7 +108,7 @@ class Feed extends Model
  /**
   * Find all library item ids that have an open feed (used in library filter)
   * @returns {Promise<Array<String>>} array of library item ids
   * @returns {Promise<string[]>} array of library item ids
   */
  static async findAllLibraryItemIds() {
    const feeds = await this.findAll({
@@ -122,8 +122,8 @@ class Feed extends Model

  /**
   * Find feed where and return oldFeed
   * @param {object} where sequelize where object
   * @returns {Promise<objects.Feed>} oldFeed
   * @param {Object} where sequelize where object
   * @returns {Promise<oldFeed>} oldFeed
   */
  static async findOneOld(where) {
    if (!where) return null
@@ -140,7 +140,7 @@ class Feed extends Model
  /**
   * Find feed and return oldFeed
   * @param {string} id
   * @returns {Promise<objects.Feed>} oldFeed
   * @returns {Promise<oldFeed>} oldFeed
   */
  static async findByPkOld(id) {
    if (!id) return null
server/models/Library.js
@@ -11,6 +11,7 @@ const oldLibrary = require('../objects/Library')
 * @property {string} autoScanCronExpression
 * @property {boolean} audiobooksOnly
 * @property {boolean} hideSingleBookSeries Do not show series that only have 1 book
 * @property {string[]} metadataPrecedence
 */

class Library extends Model {
@@ -79,6 +80,9 @@ class Library extends Model
      mediaType: libraryExpanded.mediaType,
      provider: libraryExpanded.provider,
      settings: libraryExpanded.settings,
      lastScan: libraryExpanded.lastScan?.valueOf() || null,
      lastScanVersion: libraryExpanded.lastScanVersion || null,
      lastScanMetadataPrecedence: libraryExpanded.extraData?.lastScanMetadataPrecedence || null,
      createdAt: libraryExpanded.createdAt.valueOf(),
      lastUpdate: libraryExpanded.updatedAt.valueOf()
    })
@@ -151,6 +155,9 @@ class Library extends Model
    if (oldLibrary.oldLibraryId) {
      extraData.oldLibraryId = oldLibrary.oldLibraryId
    }
    if (oldLibrary.lastScanMetadataPrecedence) {
      extraData.lastScanMetadataPrecedence = oldLibrary.lastScanMetadataPrecedence
    }
    return {
      id: oldLibrary.id,
      name: oldLibrary.name,
@@ -159,6 +166,8 @@ class Library extends Model
      mediaType: oldLibrary.mediaType || null,
      provider: oldLibrary.provider,
      settings: oldLibrary.settings?.toJSON() || {},
      lastScan: oldLibrary.lastScan || null,
      lastScanVersion: oldLibrary.lastScanVersion || null,
      createdAt: oldLibrary.createdAt,
      updatedAt: oldLibrary.lastUpdate,
      extraData
@@ -224,7 +233,7 @@ class Library extends Model
    for (let i = 0; i < libraries.length; i++) {
      const library = libraries[i]
      if (library.displayOrder !== i + 1) {
        Logger.dev(`[Library] Updating display order of library from ${library.displayOrder} to ${i + 1}`)
        Logger.debug(`[Library] Updating display order of library from ${library.displayOrder} to ${i + 1}`)
        await library.update({ displayOrder: i + 1 }).catch((error) => {
          Logger.error(`[Library] Failed to update library display order to ${i + 1}`, error)
        })
server/models/LibraryItem.js
@@ -69,7 +69,7 @@ class LibraryItem extends Model
   *
   * @param {number} offset
   * @param {number} limit
   * @returns {Promise<Model<LibraryItem>[]>} LibraryItem
   * @returns {Promise<LibraryItem[]>} LibraryItem
   */
  static getLibraryItemsIncrement(offset, limit, where = null) {
    return this.findAll({
@@ -264,7 +264,7 @@ class LibraryItem extends Model
      for (const existingPodcastEpisode of existingPodcastEpisodes) {
        // Episode was removed
        if (!updatedPodcastEpisodes.some(ep => ep.id === existingPodcastEpisode.id)) {
          Logger.dev(`[LibraryItem] "${libraryItemExpanded.media.title}" episode "${existingPodcastEpisode.title}" was removed`)
          Logger.debug(`[LibraryItem] "${libraryItemExpanded.media.title}" episode "${existingPodcastEpisode.title}" was removed`)
          await existingPodcastEpisode.destroy()
          hasUpdates = true
        }
@@ -272,7 +272,7 @@ class LibraryItem extends Model
      for (const updatedPodcastEpisode of updatedPodcastEpisodes) {
        const existingEpisodeMatch = existingPodcastEpisodes.find(ep => ep.id === updatedPodcastEpisode.id)
        if (!existingEpisodeMatch) {
          Logger.dev(`[LibraryItem] "${libraryItemExpanded.media.title}" episode "${updatedPodcastEpisode.title}" was added`)
          Logger.debug(`[LibraryItem] "${libraryItemExpanded.media.title}" episode "${updatedPodcastEpisode.title}" was added`)
          await this.sequelize.models.podcastEpisode.createFromOld(updatedPodcastEpisode)
          hasUpdates = true
        } else {
@@ -283,7 +283,7 @@ class LibraryItem extends Model
          if (existingValue instanceof Date) existingValue = existingValue.valueOf()

          if (!areEquivalent(updatedEpisodeCleaned[key], existingValue, true)) {
            Logger.dev(`[LibraryItem] "${libraryItemExpanded.media.title}" episode "${existingEpisodeMatch.title}" ${key} was updated from "${existingValue}" to "${updatedEpisodeCleaned[key]}"`)
            Logger.debug(`[LibraryItem] "${libraryItemExpanded.media.title}" episode "${existingEpisodeMatch.title}" ${key} was updated from "${existingValue}" to "${updatedEpisodeCleaned[key]}"`)
            episodeHasUpdates = true
          }
        }
@@ -304,7 +304,7 @@ class LibraryItem extends Model
      for (const existingAuthor of existingAuthors) {
        // Author was removed from Book
        if (!updatedAuthors.some(au => au.id === existingAuthor.id)) {
          Logger.dev(`[LibraryItem] "${libraryItemExpanded.media.title}" author "${existingAuthor.name}" was removed`)
          Logger.debug(`[LibraryItem] "${libraryItemExpanded.media.title}" author "${existingAuthor.name}" was removed`)
          await this.sequelize.models.bookAuthor.removeByIds(existingAuthor.id, libraryItemExpanded.media.id)
          hasUpdates = true
        }
@@ -312,7 +312,7 @@ class LibraryItem extends Model
      for (const updatedAuthor of updatedAuthors) {
        // Author was added
        if (!existingAuthors.some(au => au.id === updatedAuthor.id)) {
          Logger.dev(`[LibraryItem] "${libraryItemExpanded.media.title}" author "${updatedAuthor.name}" was added`)
          Logger.debug(`[LibraryItem] "${libraryItemExpanded.media.title}" author "${updatedAuthor.name}" was added`)
          await this.sequelize.models.bookAuthor.create({ authorId: updatedAuthor.id, bookId: libraryItemExpanded.media.id })
          hasUpdates = true
        }
@@ -320,7 +320,7 @@ class LibraryItem extends Model
      for (const existingSeries of existingSeriesAll) {
        // Series was removed
        if (!updatedSeriesAll.some(se => se.id === existingSeries.id)) {
          Logger.dev(`[LibraryItem] "${libraryItemExpanded.media.title}" series "${existingSeries.name}" was removed`)
          Logger.debug(`[LibraryItem] "${libraryItemExpanded.media.title}" series "${existingSeries.name}" was removed`)
          await this.sequelize.models.bookSeries.removeByIds(existingSeries.id, libraryItemExpanded.media.id)
          hasUpdates = true
        }
@@ -329,11 +329,11 @@ class LibraryItem extends Model
        // Series was added/updated
        const existingSeriesMatch = existingSeriesAll.find(se => se.id === updatedSeries.id)
        if (!existingSeriesMatch) {
          Logger.dev(`[LibraryItem] "${libraryItemExpanded.media.title}" series "${updatedSeries.name}" was added`)
          Logger.debug(`[LibraryItem] "${libraryItemExpanded.media.title}" series "${updatedSeries.name}" was added`)
          await this.sequelize.models.bookSeries.create({ seriesId: updatedSeries.id, bookId: libraryItemExpanded.media.id, sequence: updatedSeries.sequence })
          hasUpdates = true
        } else if (existingSeriesMatch.bookSeries.sequence !== updatedSeries.sequence) {
          Logger.dev(`[LibraryItem] "${libraryItemExpanded.media.title}" series "${updatedSeries.name}" sequence was updated from "${existingSeriesMatch.bookSeries.sequence}" to "${updatedSeries.sequence}"`)
          Logger.debug(`[LibraryItem] "${libraryItemExpanded.media.title}" series "${updatedSeries.name}" sequence was updated from "${existingSeriesMatch.bookSeries.sequence}" to "${updatedSeries.sequence}"`)
          await existingSeriesMatch.bookSeries.update({ id: updatedSeries.id, sequence: updatedSeries.sequence })
          hasUpdates = true
        }
@@ -346,7 +346,7 @@ class LibraryItem extends Model
        if (existingValue instanceof Date) existingValue = existingValue.valueOf()

        if (!areEquivalent(updatedMedia[key], existingValue, true)) {
          Logger.dev(`[LibraryItem] "${libraryItemExpanded.media.title}" ${libraryItemExpanded.mediaType}.${key} updated from ${existingValue} to ${updatedMedia[key]}`)
          Logger.debug(`[LibraryItem] "${libraryItemExpanded.media.title}" ${libraryItemExpanded.mediaType}.${key} updated from ${existingValue} to ${updatedMedia[key]}`)
          hasMediaUpdates = true
        }
      }
@@ -363,7 +363,7 @@ class LibraryItem extends Model
        if (existingValue instanceof Date) existingValue = existingValue.valueOf()

        if (!areEquivalent(updatedLibraryItem[key], existingValue, true)) {
          Logger.dev(`[LibraryItem] "${libraryItemExpanded.media.title}" ${key} updated from ${existingValue} to ${updatedLibraryItem[key]}`)
          Logger.debug(`[LibraryItem] "${libraryItemExpanded.media.title}" ${key} updated from ${existingValue} to ${updatedLibraryItem[key]}`)
          hasLibraryItemUpdates = true
        }
      }
@@ -419,40 +419,45 @@ class LibraryItem extends Model
   */
  static async getOldById(libraryItemId) {
    if (!libraryItemId) return null
    const libraryItem = await this.findByPk(libraryItemId, {
      include: [
        {
          model: this.sequelize.models.book,
          include: [
            {
              model: this.sequelize.models.author,
              through: {
                attributes: []
              }
            },
            {
              model: this.sequelize.models.series,
              through: {
                attributes: ['sequence']
              }

    const libraryItem = await this.findByPk(libraryItemId)
    if (!libraryItem) {
      Logger.error(`[LibraryItem] Library item not found with id "${libraryItemId}"`)
      return null
    }

    if (libraryItem.mediaType === 'podcast') {
      libraryItem.media = await libraryItem.getMedia({
        include: [
          {
            model: this.sequelize.models.podcastEpisode
          }
        ]
      })
    } else {
      libraryItem.media = await libraryItem.getMedia({
        include: [
          {
            model: this.sequelize.models.author,
            through: {
              attributes: []
            }
          ]
        },
        {
          model: this.sequelize.models.podcast,
          include: [
            {
              model: this.sequelize.models.podcastEpisode
            },
            {
              model: this.sequelize.models.series,
              through: {
                attributes: ['sequence']
              }
            }
          ]
        }
      ],
      order: [
        [this.sequelize.models.book, this.sequelize.models.author, this.sequelize.models.bookAuthor, 'createdAt', 'ASC'],
        [this.sequelize.models.book, this.sequelize.models.series, 'bookSeries', 'createdAt', 'ASC']
      ]
    })
    if (!libraryItem) return null
          }
        ],
        order: [
          [this.sequelize.models.author, this.sequelize.models.bookAuthor, 'createdAt', 'ASC'],
          [this.sequelize.models.series, 'bookSeries', 'createdAt', 'ASC']
        ]
      })
    }

    if (!libraryItem.media) return null
    return this.getOldLibraryItem(libraryItem)
  }

@@ -536,7 +541,7 @@ class LibraryItem extends Model
        })
      }
    }
    Logger.dev(`Loaded ${itemsInProgressPayload.items.length} of ${itemsInProgressPayload.count} items for "Continue Listening/Reading" in ${((Date.now() - fullStart) / 1000).toFixed(2)}s`)
    Logger.debug(`Loaded ${itemsInProgressPayload.items.length} of ${itemsInProgressPayload.count} items for "Continue Listening/Reading" in ${((Date.now() - fullStart) / 1000).toFixed(2)}s`)

    let start = Date.now()
    if (library.isBook) {
@@ -553,7 +558,7 @@ class LibraryItem extends Model
          total: continueSeriesPayload.count
        })
      }
      Logger.dev(`Loaded ${continueSeriesPayload.libraryItems.length} of ${continueSeriesPayload.count} items for "Continue Series" in ${((Date.now() - start) / 1000).toFixed(2)}s`)
      Logger.debug(`Loaded ${continueSeriesPayload.libraryItems.length} of ${continueSeriesPayload.count} items for "Continue Series" in ${((Date.now() - start) / 1000).toFixed(2)}s`)
    } else if (library.isPodcast) {
      // "Newest Episodes" shelf
      const newestEpisodesPayload = await libraryFilters.getNewestPodcastEpisodes(library, user, limit)
@@ -567,7 +572,7 @@ class LibraryItem extends Model
          total: newestEpisodesPayload.count
        })
      }
      Logger.dev(`Loaded ${newestEpisodesPayload.libraryItems.length} of ${newestEpisodesPayload.count} episodes for "Newest Episodes" in ${((Date.now() - start) / 1000).toFixed(2)}s`)
      Logger.debug(`Loaded ${newestEpisodesPayload.libraryItems.length} of ${newestEpisodesPayload.count} episodes for "Newest Episodes" in ${((Date.now() - start) / 1000).toFixed(2)}s`)
    }

    start = Date.now()
@@ -583,7 +588,7 @@ class LibraryItem extends Model
        total: mostRecentPayload.count
      })
    }
    Logger.dev(`Loaded ${mostRecentPayload.libraryItems.length} of ${mostRecentPayload.count} items for "Recently Added" in ${((Date.now() - start) / 1000).toFixed(2)}s`)
    Logger.debug(`Loaded ${mostRecentPayload.libraryItems.length} of ${mostRecentPayload.count} items for "Recently Added" in ${((Date.now() - start) / 1000).toFixed(2)}s`)

    if (library.isBook) {
      start = Date.now()
@@ -599,7 +604,7 @@ class LibraryItem extends Model
          total: seriesMostRecentPayload.count
        })
      }
      Logger.dev(`Loaded ${seriesMostRecentPayload.series.length} of ${seriesMostRecentPayload.count} series for "Recent Series" in ${((Date.now() - start) / 1000).toFixed(2)}s`)
      Logger.debug(`Loaded ${seriesMostRecentPayload.series.length} of ${seriesMostRecentPayload.count} series for "Recent Series" in ${((Date.now() - start) / 1000).toFixed(2)}s`)

      start = Date.now()
      // "Discover" shelf
@@ -614,7 +619,7 @@ class LibraryItem extends Model
          total: discoverLibraryItemsPayload.count
        })
      }
      Logger.dev(`Loaded ${discoverLibraryItemsPayload.libraryItems.length} of ${discoverLibraryItemsPayload.count} items for "Discover" in ${((Date.now() - start) / 1000).toFixed(2)}s`)
      Logger.debug(`Loaded ${discoverLibraryItemsPayload.libraryItems.length} of ${discoverLibraryItemsPayload.count} items for "Discover" in ${((Date.now() - start) / 1000).toFixed(2)}s`)
    }

    start = Date.now()
@@ -645,7 +650,7 @@ class LibraryItem extends Model
        })
      }
    }
    Logger.dev(`Loaded ${mediaFinishedPayload.items.length} of ${mediaFinishedPayload.count} items for "Listen/Read Again" in ${((Date.now() - start) / 1000).toFixed(2)}s`)
    Logger.debug(`Loaded ${mediaFinishedPayload.items.length} of ${mediaFinishedPayload.count} items for "Listen/Read Again" in ${((Date.now() - start) / 1000).toFixed(2)}s`)

    if (library.isBook) {
      start = Date.now()
@@ -661,7 +666,7 @@ class LibraryItem extends Model
          total: newestAuthorsPayload.count
        })
      }
      Logger.dev(`Loaded ${newestAuthorsPayload.authors.length} of ${newestAuthorsPayload.count} authors for "Newest Authors" in ${((Date.now() - start) / 1000).toFixed(2)}s`)
      Logger.debug(`Loaded ${newestAuthorsPayload.authors.length} of ${newestAuthorsPayload.count} authors for "Newest Authors" in ${((Date.now() - start) / 1000).toFixed(2)}s`)
    }

    Logger.debug(`Loaded ${shelves.length} personalized shelves in ${((Date.now() - fullStart) / 1000).toFixed(2)}s`)
server/models/Podcast.js
@@ -112,6 +112,25 @@ class Podcast extends Model
    }
  }

  getAbsMetadataJson() {
    return {
      tags: this.tags || [],
      title: this.title,
      author: this.author,
      description: this.description,
      releaseDate: this.releaseDate,
      genres: this.genres || [],
      feedURL: this.feedURL,
      imageURL: this.imageURL,
      itunesPageURL: this.itunesPageURL,
      itunesId: this.itunesId,
      itunesArtistId: this.itunesArtistId,
      language: this.language,
      explicit: !!this.explicit,
      podcastType: this.podcastType
    }
  }

  /**
   * Initialize model
   * @param {import('../Database').sequelize} sequelize
server/models/PodcastEpisode.js
@@ -79,6 +79,7 @@ class PodcastEpisode extends Model
      subtitle: this.subtitle,
      description: this.description,
      enclosure,
      guid: this.extraData?.guid || null,
      pubDate: this.pubDate,
      chapters: this.chapters,
      audioFile: this.audioFile,
@@ -98,6 +99,9 @@ class PodcastEpisode extends Model
    if (oldEpisode.oldEpisodeId) {
      extraData.oldEpisodeId = oldEpisode.oldEpisodeId
    }
    if (oldEpisode.guid) {
      extraData.guid = oldEpisode.guid
    }
    return {
      id: oldEpisode.id,
      index: oldEpisode.index,
@@ -148,7 +152,12 @@ class PodcastEpisode extends Model
      extraData: DataTypes.JSON
    }, {
      sequelize,
      modelName: 'podcastEpisode'
      modelName: 'podcastEpisode',
      indexes: [
        {
          fields: ['createdAt']
        }
      ]
    })

    const { podcast } = sequelize.models
server/models/User.js
@@ -1,7 +1,9 @@
const uuidv4 = require("uuid").v4
const { DataTypes, Model, Op } = require('sequelize')
const sequelize = require('sequelize')
const Logger = require('../Logger')
const oldUser = require('../objects/user/User')
const SocketAuthority = require('../SocketAuthority')
const { DataTypes, Model } = sequelize

class User extends Model {
  constructor(values, options) {
@@ -46,6 +48,12 @@ class User extends Model
    return users.map(u => this.getOldUser(u))
  }

  /**
   * Get old user model from new
   *
   * @param {Object} userExpanded
   * @returns {oldUser}
   */
  static getOldUser(userExpanded) {
    const mediaProgress = userExpanded.mediaProgresses.map(mp => mp.getOldMediaProgress())

@@ -59,6 +67,7 @@ class User extends Model
      id: userExpanded.id,
      oldUserId: userExpanded.extraData?.oldUserId || null,
      username: userExpanded.username,
      email: userExpanded.email || null,
      pash: userExpanded.pash,
      type: userExpanded.type,
      token: userExpanded.token,
@@ -71,18 +80,32 @@ class User extends Model
      createdAt: userExpanded.createdAt.valueOf(),
      permissions,
      librariesAccessible,
      itemTagsSelected
      itemTagsSelected,
      authOpenIDSub: userExpanded.extraData?.authOpenIDSub || null
    })
  }

  /**
   *
   * @param {oldUser} oldUser
   * @returns {Promise<User>}
   */
  static createFromOld(oldUser) {
    const user = this.getFromOld(oldUser)
    return this.create(user)
  }

  static updateFromOld(oldUser) {
  /**
   * Update User from old user model
   *
   * @param {oldUser} oldUser
   * @param {boolean} [hooks=true] Run before / after bulk update hooks?
   * @returns {Promise<boolean>}
   */
  static updateFromOld(oldUser, hooks = true) {
    const user = this.getFromOld(oldUser)
    return this.update(user, {
      hooks: !!hooks,
      where: {
        id: user.id
      }
@@ -92,19 +115,31 @@ class User extends Model
    })
  }

  /**
   * Get new User model from old
   *
   * @param {oldUser} oldUser
   * @returns {Object}
   */
  static getFromOld(oldUser) {
    const extraData = {
      seriesHideFromContinueListening: oldUser.seriesHideFromContinueListening || [],
      oldUserId: oldUser.oldUserId
    }
    if (oldUser.authOpenIDSub) {
      extraData.authOpenIDSub = oldUser.authOpenIDSub
    }

    return {
      id: oldUser.id,
      username: oldUser.username,
      email: oldUser.email || null,
      pash: oldUser.pash || null,
      type: oldUser.type || null,
      token: oldUser.token || null,
      isActive: !!oldUser.isActive,
      lastSeen: oldUser.lastSeen || null,
      extraData: {
        seriesHideFromContinueListening: oldUser.seriesHideFromContinueListening || [],
        oldUserId: oldUser.oldUserId
      },
      extraData,
      createdAt: oldUser.createdAt || Date.now(),
      permissions: {
        ...oldUser.permissions,
@@ -128,12 +163,12 @@ class User extends Model
   * @param {string} username
   * @param {string} pash
   * @param {Auth} auth
   * @returns {oldUser}
   * @returns {Promise<oldUser>}
   */
  static async createRootUser(username, pash, auth) {
    const userId = uuidv4()

    const token = await auth.generateAccessToken({ userId, username })
    const token = await auth.generateAccessToken({ id: userId, username })

    const newRoot = new oldUser({
      id: userId,
@@ -148,6 +183,38 @@ class User extends Model
    return newRoot
  }

  /**
   * Create user from openid userinfo
   * @param {Object} userinfo
   * @param {Auth} auth
   * @returns {Promise<oldUser>}
   */
  static async createUserFromOpenIdUserInfo(userinfo, auth) {
    const userId = uuidv4()
    // TODO: Ensure username is unique?
    const username = userinfo.preferred_username || userinfo.name || userinfo.sub
    const email = (userinfo.email && userinfo.email_verified) ? userinfo.email : null

    const token = await auth.generateAccessToken({ id: userId, username })

    const newUser = new oldUser({
      id: userId,
      type: 'user',
      username,
      email,
      pash: null,
      token,
      isActive: true,
      authOpenIDSub: userinfo.sub,
      createdAt: Date.now()
    })
    if (await this.createFromOld(newUser)) {
      SocketAuthority.adminEmitter('user_added', newUser.toJSONForBrowser())
      return newUser
    }
    return null
  }
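
For context, a typical OpenID Connect userinfo payload that the method above consumes (field names are standard OIDC claims; values invented):

    {
      "sub": "248289761001",
      "name": "Jane Doe",
      "preferred_username": "j.doe",
      "email": "janedoe@example.com",
      "email_verified": true
    }

    // username resolves to "j.doe" (preferred_username wins),
    // and email is kept only because email_verified is true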

  /**
   * Get a user by id or by the old database id
   * @temp User ids were updated in v2.3.0 migration and old API tokens may still use that id
@@ -158,13 +225,13 @@ class User extends Model
    if (!userId) return null
    const user = await this.findOne({
      where: {
        [Op.or]: [
        [sequelize.Op.or]: [
          {
            id: userId
          },
          {
            extraData: {
              [Op.substring]: userId
              [sequelize.Op.substring]: userId
            }
          }
        ]
@@ -185,7 +252,26 @@ class User extends Model
    const user = await this.findOne({
      where: {
        username: {
          [Op.like]: username
          [sequelize.Op.like]: username
        }
      },
      include: this.sequelize.models.mediaProgress
    })
    if (!user) return null
    return this.getOldUser(user)
  }

  /**
   * Get user by email case insensitive
   * @param {string} email
   * @returns {Promise<oldUser|null>} returns null if not found
   */
  static async getUserByEmail(email) {
    if (!email) return null
    const user = await this.findOne({
      where: {
        email: {
          [sequelize.Op.like]: email
        }
      },
      include: this.sequelize.models.mediaProgress
@@ -208,6 +294,21 @@ class User extends Model
    return this.getOldUser(user)
  }

  /**
   * Get user by openid sub
   * @param {string} sub
   * @returns {Promise<oldUser|null>} returns null if not found
   */
  static async getUserByOpenIDSub(sub) {
    if (!sub) return null
    const user = await this.findOne({
      where: sequelize.where(sequelize.literal(`extraData->>"authOpenIDSub"`), sub),
      include: this.sequelize.models.mediaProgress
    })
    if (!user) return null
    return this.getOldUser(user)
  }
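
A quick usage note: the where clause above compares a key inside the extraData JSON column using SQLite's ->> extraction operator via sequelize.literal. A minimal call sketch (the sub value is invented):

    const Database = require('./server/Database')

    // Resolve the local user previously linked to this OpenID subject, or null
    const user = await Database.userModel.getUserByOpenIDSub('248289761001')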

  /**
   * Get array of user id and username
   * @returns {object[]} { id, username }
server/objects/Feed.js
@@ -1,3 +1,4 @@
const Path = require('path')
const uuidv4 = require("uuid").v4
const FeedMeta = require('./FeedMeta')
const FeedEpisode = require('./FeedEpisode')
@@ -101,11 +102,13 @@ class Feed
    this.serverAddress = serverAddress
    this.feedUrl = feedUrl

    const coverFileExtension = this.coverPath ? Path.extname(media.coverPath) : null

    this.meta = new FeedMeta()
    this.meta.title = mediaMetadata.title
    this.meta.description = mediaMetadata.description
    this.meta.author = author
    this.meta.imageUrl = media.coverPath ? `${serverAddress}/feed/${slug}/cover` : `${serverAddress}/Logo.png`
    this.meta.imageUrl = media.coverPath ? `${serverAddress}/feed/${slug}/cover${coverFileExtension}` : `${serverAddress}/Logo.png`
    this.meta.feedUrl = feedUrl
    this.meta.link = `${serverAddress}/item/${libraryItem.id}`
    this.meta.explicit = !!mediaMetadata.explicit
@@ -145,10 +148,12 @@ class Feed
    this.entityUpdatedAt = libraryItem.updatedAt
    this.coverPath = media.coverPath || null

    const coverFileExtension = this.coverPath ? Path.extname(media.coverPath) : null

    this.meta.title = mediaMetadata.title
    this.meta.description = mediaMetadata.description
    this.meta.author = author
    this.meta.imageUrl = media.coverPath ? `${this.serverAddress}/feed/${this.slug}/cover` : `${this.serverAddress}/Logo.png`
    this.meta.imageUrl = media.coverPath ? `${this.serverAddress}/feed/${this.slug}/cover${coverFileExtension}` : `${this.serverAddress}/Logo.png`
    this.meta.explicit = !!mediaMetadata.explicit
    this.meta.type = mediaMetadata.type
    this.meta.language = mediaMetadata.language
@@ -174,7 +179,7 @@ class Feed
    this.xml = null
  }

  setFromCollection(userId, slug, collectionExpanded, serverAddress) {
  setFromCollection(userId, slug, collectionExpanded, serverAddress, preventIndexing = true, ownerName = null, ownerEmail = null) {
    const feedUrl = `${serverAddress}/feed/${slug}`

    const itemsWithTracks = collectionExpanded.books.filter(libraryItem => libraryItem.media.tracks.length)
@@ -190,14 +195,19 @@ class Feed
    this.serverAddress = serverAddress
    this.feedUrl = feedUrl

    const coverFileExtension = this.coverPath ? Path.extname(media.coverPath) : null

    this.meta = new FeedMeta()
    this.meta.title = collectionExpanded.name
    this.meta.description = collectionExpanded.description || ''
    this.meta.author = this.getAuthorsStringFromLibraryItems(itemsWithTracks)
    this.meta.imageUrl = this.coverPath ? `${serverAddress}/feed/${slug}/cover` : `${serverAddress}/Logo.png`
    this.meta.imageUrl = this.coverPath ? `${serverAddress}/feed/${slug}/cover${coverFileExtension}` : `${serverAddress}/Logo.png`
    this.meta.feedUrl = feedUrl
    this.meta.link = `${serverAddress}/collection/${collectionExpanded.id}`
    this.meta.explicit = !!itemsWithTracks.some(li => li.media.metadata.explicit) // explicit if any item is explicit
    this.meta.preventIndexing = preventIndexing
    this.meta.ownerName = ownerName
    this.meta.ownerEmail = ownerEmail

    this.episodes = []

@@ -222,10 +232,12 @@ class Feed
    this.entityUpdatedAt = collectionExpanded.lastUpdate
    this.coverPath = firstItemWithCover?.coverPath || null

    const coverFileExtension = this.coverPath ? Path.extname(media.coverPath) : null

    this.meta.title = collectionExpanded.name
    this.meta.description = collectionExpanded.description || ''
    this.meta.author = this.getAuthorsStringFromLibraryItems(itemsWithTracks)
    this.meta.imageUrl = this.coverPath ? `${this.serverAddress}/feed/${this.slug}/cover` : `${this.serverAddress}/Logo.png`
    this.meta.imageUrl = this.coverPath ? `${this.serverAddress}/feed/${this.slug}/cover${coverFileExtension}` : `${this.serverAddress}/Logo.png`
    this.meta.explicit = !!itemsWithTracks.some(li => li.media.metadata.explicit) // explicit if any item is explicit

    this.episodes = []

@@ -244,7 +256,7 @@ class Feed
    this.xml = null
  }

  setFromSeries(userId, slug, seriesExpanded, serverAddress) {
  setFromSeries(userId, slug, seriesExpanded, serverAddress, preventIndexing = true, ownerName = null, ownerEmail = null) {
    const feedUrl = `${serverAddress}/feed/${slug}`

    let itemsWithTracks = seriesExpanded.books.filter(libraryItem => libraryItem.media.tracks.length)
@@ -264,14 +276,19 @@ class Feed
    this.serverAddress = serverAddress
    this.feedUrl = feedUrl

    const coverFileExtension = this.coverPath ? Path.extname(media.coverPath) : null

    this.meta = new FeedMeta()
    this.meta.title = seriesExpanded.name
    this.meta.description = seriesExpanded.description || ''
    this.meta.author = this.getAuthorsStringFromLibraryItems(itemsWithTracks)
    this.meta.imageUrl = this.coverPath ? `${serverAddress}/feed/${slug}/cover` : `${serverAddress}/Logo.png`
    this.meta.imageUrl = this.coverPath ? `${serverAddress}/feed/${slug}/cover${coverFileExtension}` : `${serverAddress}/Logo.png`
    this.meta.feedUrl = feedUrl
    this.meta.link = `${serverAddress}/library/${libraryId}/series/${seriesExpanded.id}`
    this.meta.explicit = !!itemsWithTracks.some(li => li.media.metadata.explicit) // explicit if any item is explicit
    this.meta.preventIndexing = preventIndexing
    this.meta.ownerName = ownerName
    this.meta.ownerEmail = ownerEmail

    this.episodes = []

@@ -299,10 +316,12 @@ class Feed
    this.entityUpdatedAt = seriesExpanded.updatedAt
    this.coverPath = firstItemWithCover?.coverPath || null

    const coverFileExtension = this.coverPath ? Path.extname(media.coverPath) : null

    this.meta.title = seriesExpanded.name
    this.meta.description = seriesExpanded.description || ''
    this.meta.author = this.getAuthorsStringFromLibraryItems(itemsWithTracks)
    this.meta.imageUrl = this.coverPath ? `${this.serverAddress}/feed/${this.slug}/cover` : `${this.serverAddress}/Logo.png`
    this.meta.imageUrl = this.coverPath ? `${this.serverAddress}/feed/${this.slug}/cover${coverFileExtension}` : `${this.serverAddress}/Logo.png`
    this.meta.explicit = !!itemsWithTracks.some(li => li.media.metadata.explicit) // explicit if any item is explicit
|
||||
|
||||
this.episodes = []
|
||||
|
|
|
|||
|
|
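Every `setFrom*` variant above makes the same fix: the feed's `imageUrl` now carries the cover's real file extension, since some podcast clients refuse artwork URLs that lack one. A minimal standalone sketch of the idea (the helper name is invented for illustration):

const Path = require('path')

// Append the cover's actual extension (e.g. ".jpg") to the feed cover route
// so strict podcast clients accept the artwork URL; fall back to the server logo.
function buildFeedImageUrl(serverAddress, slug, coverPath) {
  if (!coverPath) return `${serverAddress}/Logo.png`
  const coverFileExtension = Path.extname(coverPath) // ".jpg", ".png", ...
  return `${serverAddress}/feed/${slug}/cover${coverFileExtension}`
}

// buildFeedImageUrl('https://abs.example.com', 'my-book', '/audiobooks/my-book/cover.jpg')
// => 'https://abs.example.com/feed/my-book/cover.jpg'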
@@ -1,3 +1,4 @@
+const Path = require('path')
 const uuidv4 = require("uuid").v4
 const date = require('../libs/dateAndTime')
 const { secondsToTimestamp } = require('../utils/index')
@@ -69,7 +70,8 @@ class FeedEpisode {
   }

   setFromPodcastEpisode(libraryItem, serverAddress, slug, episode, meta) {
-    const contentUrl = `/feed/${slug}/item/${episode.id}/${episode.audioFile.metadata.filename}`
+    const contentFileExtension = Path.extname(episode.audioFile.metadata.filename)
+    const contentUrl = `/feed/${slug}/item/${episode.id}/media${contentFileExtension}`
     const media = libraryItem.media
     const mediaMetadata = media.metadata

@@ -108,7 +110,8 @@ class FeedEpisode {
     // e.g. Track 1 will have a pub date before Track 2
     const audiobookPubDate = date.format(new Date(libraryItem.addedAt + timeOffset), 'ddd, DD MMM YYYY HH:mm:ss [GMT]')

-    const contentUrl = `/feed/${slug}/item/${episodeId}/${audioTrack.metadata.filename}`
+    const contentFileExtension = Path.extname(audioTrack.metadata.filename)
+    const contentUrl = `/feed/${slug}/item/${episodeId}/media${contentFileExtension}`
     const media = libraryItem.media
     const mediaMetadata = media.metadata
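The companion change for episodes: enclosure URLs now use a fixed `media` basename plus the original file extension, so arbitrary (possibly unescaped) filenames never end up in the RSS. In isolation, with an invented helper name:

const Path = require('path')

// Fixed basename + real extension keeps episode URLs predictable and
// avoids URL-encoding issues with user filenames. Illustrative only.
function buildEpisodeContentUrl(slug, episodeId, filename) {
  const contentFileExtension = Path.extname(filename) // e.g. ".mp3"
  return `/feed/${slug}/item/${episodeId}/media${contentFileExtension}`
}

// buildEpisodeContentUrl('my-pod', 'ep-123', 'Episode 01 (final).mp3')
// => '/feed/my-pod/item/ep-123/media.mp3'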
@@ -15,6 +15,9 @@ class Library {
     this.provider = 'google'

+    this.lastScan = 0
+    this.lastScanVersion = null
+    this.lastScanMetadataPrecedence = null

     this.settings = null

     this.createdAt = null
@@ -53,6 +56,10 @@ class Library {
       this.settings.disableWatcher = !!library.disableWatcher
     }

+    this.lastScan = library.lastScan
+    this.lastScanVersion = library.lastScanVersion
+    this.lastScanMetadataPrecedence = library.lastScanMetadataPrecedence
+
     this.createdAt = library.createdAt
     this.lastUpdate = library.lastUpdate
     this.cleanOldValues() // mediaType changed for v2 and icon change for v2.2.2
@@ -84,6 +91,8 @@ class Library {
       mediaType: this.mediaType,
       provider: this.provider,
       settings: this.settings.toJSON(),
+      lastScan: this.lastScan,
+      lastScanVersion: this.lastScanVersion,
       createdAt: this.createdAt,
       lastUpdate: this.lastUpdate
     }
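These three fields persist per-library scan bookkeeping. A plausible consumer (illustrative only, not code from this diff) would compare them against the running server to decide whether a rescan is due:

// Illustrative only; the decision logic is an assumption, not from this commit.
const { version } = require('./package.json') // current server version, path assumed

function libraryNeedsRescan(library) {
  if (!library.lastScan) return true // never scanned
  return library.lastScanVersion !== version // scanner may have changed since last scan
}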
@@ -1,15 +1,13 @@
 const uuidv4 = require("uuid").v4
 const fs = require('../libs/fsExtra')
 const Path = require('path')
-const { version } = require('../../package.json')
 const Logger = require('../Logger')
-const abmetadataGenerator = require('../utils/generators/abmetadataGenerator')
 const LibraryFile = require('./files/LibraryFile')
 const Book = require('./mediaTypes/Book')
 const Podcast = require('./mediaTypes/Podcast')
 const Video = require('./mediaTypes/Video')
 const Music = require('./mediaTypes/Music')
-const { areEquivalent, copyValue, cleanStringForSearch } = require('../utils/index')
+const { areEquivalent, copyValue } = require('../utils/index')
 const { filePathToPOSIX, getFileTimestampsWithIno } = require('../utils/fileUtils')

 class LibraryItem {
@@ -180,34 +178,23 @@ class LibraryItem {
     this.libraryFiles.forEach((lf) => total += lf.metadata.size)
     return total
   }
-  get audioFileTotalSize() {
-    let total = 0
-    this.libraryFiles.filter(lf => lf.fileType == 'audio').forEach((lf) => total += lf.metadata.size)
-    return total
-  }
-  get hasAudioFiles() {
-    return this.libraryFiles.some(lf => lf.fileType === 'audio')
-  }
   get hasMediaEntities() {
     return this.media.hasMediaEntities
   }
-  get hasIssues() {
-    if (this.isMissing || this.isInvalid) return true
-    return this.media.hasIssues
-  }
-
   // Data comes from scandir library item data
+  // TODO: Remove this function. Only used when creating a new podcast now
   setData(libraryMediaType, payload) {
     this.id = uuidv4()
     this.mediaType = libraryMediaType
-    if (libraryMediaType === 'video') {
-      this.media = new Video()
-    } else if (libraryMediaType === 'podcast') {
+    if (libraryMediaType === 'podcast') {
      this.media = new Podcast()
     } else if (libraryMediaType === 'book') {
       this.media = new Book()
     } else if (libraryMediaType === 'music') {
       this.media = new Music()
+    } else {
+      Logger.error(`[LibraryItem] setData called with unsupported media type "${libraryMediaType}"`)
+      return
     }
     this.media.id = uuidv4()
     this.media.libraryItemId = this.id
@@ -270,85 +257,13 @@ class LibraryItem {
     this.updatedAt = Date.now()
   }

-  setInvalid() {
-    this.isInvalid = true
-    this.updatedAt = Date.now()
-  }
-
-  setLastScan() {
-    this.lastScan = Date.now()
-    this.updatedAt = Date.now()
-    this.scanVersion = version
-  }
-
-  // Returns null if file not found, true if file was updated, false if up to date
-  // updates existing LibraryFile, AudioFile, EBookFile's
-  checkFileFound(fileFound) {
-    let hasUpdated = false
-
-    let existingFile = this.libraryFiles.find(lf => lf.ino === fileFound.ino)
-    let mediaFile = null
-    if (!existingFile) {
-      existingFile = this.libraryFiles.find(lf => lf.metadata.path === fileFound.metadata.path)
-      if (existingFile) {
-        // Update media file ino
-        mediaFile = this.media.findFileWithInode(existingFile.ino)
-        if (mediaFile) {
-          mediaFile.ino = fileFound.ino
-        }
-
-        // file inode was updated
-        existingFile.ino = fileFound.ino
-        hasUpdated = true
-      } else {
-        // file not found
-        return null
-      }
-    } else {
-      mediaFile = this.media.findFileWithInode(existingFile.ino)
-    }
-
-    if (existingFile.metadata.path !== fileFound.metadata.path) {
-      existingFile.metadata.path = fileFound.metadata.path
-      existingFile.metadata.relPath = fileFound.metadata.relPath
-      if (mediaFile) {
-        mediaFile.metadata.path = fileFound.metadata.path
-        mediaFile.metadata.relPath = fileFound.metadata.relPath
-      }
-      hasUpdated = true
-    }
-
-    // FileMetadata keys
-    ['filename', 'ext', 'mtimeMs', 'ctimeMs', 'birthtimeMs', 'size'].forEach((key) => {
-      if (existingFile.metadata[key] !== fileFound.metadata[key]) {
-        // Add modified flag on file data object if exists and was changed
-        if (key === 'mtimeMs' && existingFile.metadata[key]) {
-          fileFound.metadata.wasModified = true
-        }
-
-        existingFile.metadata[key] = fileFound.metadata[key]
-        if (mediaFile) {
-          if (key === 'mtimeMs') mediaFile.metadata.wasModified = true
-          mediaFile.metadata[key] = fileFound.metadata[key]
-        }
-        hasUpdated = true
-      }
-    })
-
-    return hasUpdated
-  }
-
-  searchQuery(query) {
-    query = cleanStringForSearch(query)
-    return this.media.searchQuery(query)
-  }
-
   getDirectPlayTracklist(episodeId) {
     return this.media.getDirectPlayTracklist(episodeId)
   }

   /**
-   * Save metadata.json/metadata.abs file
+   * Save metadata.json file
+   * TODO: Move to new LibraryItem model
    * @returns {Promise<LibraryFile>} null if not saved
    */
   async saveMetadata() {
@@ -366,91 +281,41 @@ class LibraryItem {
       await fs.ensureDir(metadataPath)
     }

-    const metadataFileFormat = global.ServerSettings.metadataFileFormat
-    const metadataFilePath = Path.join(metadataPath, `metadata.${metadataFileFormat}`)
-    if (metadataFileFormat === 'json') {
-      // Remove metadata.abs if it exists
-      if (await fs.pathExists(Path.join(metadataPath, `metadata.abs`))) {
-        Logger.debug(`[LibraryItem] Removing metadata.abs for item "${this.media.metadata.title}"`)
-        await fs.remove(Path.join(metadataPath, `metadata.abs`))
-        this.libraryFiles = this.libraryFiles.filter(lf => lf.metadata.path !== filePathToPOSIX(Path.join(metadataPath, `metadata.abs`)))
-      }
+    const metadataFilePath = Path.join(metadataPath, `metadata.${global.ServerSettings.metadataFileFormat}`)

-      return fs.writeFile(metadataFilePath, JSON.stringify(this.media.toJSONForMetadataFile(), null, 2)).then(async () => {
-        // Add metadata.json to libraryFiles array if it is new
-        let metadataLibraryFile = this.libraryFiles.find(lf => lf.metadata.path === filePathToPOSIX(metadataFilePath))
-        if (storeMetadataWithItem) {
-          if (!metadataLibraryFile) {
-            metadataLibraryFile = new LibraryFile()
-            await metadataLibraryFile.setDataFromPath(metadataFilePath, `metadata.json`)
-            this.libraryFiles.push(metadataLibraryFile)
-          } else {
-            const fileTimestamps = await getFileTimestampsWithIno(metadataFilePath)
-            if (fileTimestamps) {
-              metadataLibraryFile.metadata.mtimeMs = fileTimestamps.mtimeMs
-              metadataLibraryFile.metadata.ctimeMs = fileTimestamps.ctimeMs
-              metadataLibraryFile.metadata.size = fileTimestamps.size
-              metadataLibraryFile.ino = fileTimestamps.ino
-            }
-          }
-          const libraryItemDirTimestamps = await getFileTimestampsWithIno(this.path)
-          if (libraryItemDirTimestamps) {
-            this.mtimeMs = libraryItemDirTimestamps.mtimeMs
-            this.ctimeMs = libraryItemDirTimestamps.ctimeMs
-          }
-        }
+    return fs.writeFile(metadataFilePath, JSON.stringify(this.media.toJSONForMetadataFile(), null, 2)).then(async () => {
+      // Add metadata.json to libraryFiles array if it is new
+      let metadataLibraryFile = this.libraryFiles.find(lf => lf.metadata.path === filePathToPOSIX(metadataFilePath))
+      if (storeMetadataWithItem) {
+        if (!metadataLibraryFile) {
+          metadataLibraryFile = new LibraryFile()
+          await metadataLibraryFile.setDataFromPath(metadataFilePath, `metadata.json`)
+          this.libraryFiles.push(metadataLibraryFile)
+        } else {
+          const fileTimestamps = await getFileTimestampsWithIno(metadataFilePath)
+          if (fileTimestamps) {
+            metadataLibraryFile.metadata.mtimeMs = fileTimestamps.mtimeMs
+            metadataLibraryFile.metadata.ctimeMs = fileTimestamps.ctimeMs
+            metadataLibraryFile.metadata.size = fileTimestamps.size
+            metadataLibraryFile.ino = fileTimestamps.ino
+          }
+        }
+        const libraryItemDirTimestamps = await getFileTimestampsWithIno(this.path)
+        if (libraryItemDirTimestamps) {
+          this.mtimeMs = libraryItemDirTimestamps.mtimeMs
+          this.ctimeMs = libraryItemDirTimestamps.ctimeMs
+        }
+      }

-        Logger.debug(`[LibraryItem] Success saving abmetadata to "${metadataFilePath}"`)
+      Logger.debug(`[LibraryItem] Success saving abmetadata to "${metadataFilePath}"`)

-        return metadataLibraryFile
-      }).catch((error) => {
-        Logger.error(`[LibraryItem] Failed to save json file at "${metadataFilePath}"`, error)
-        return null
-      }).finally(() => {
-        this.isSavingMetadata = false
-      })
-    } else {
-      // Remove metadata.json if it exists
-      if (await fs.pathExists(Path.join(metadataPath, `metadata.json`))) {
-        Logger.debug(`[LibraryItem] Removing metadata.json for item "${this.media.metadata.title}"`)
-        await fs.remove(Path.join(metadataPath, `metadata.json`))
-        this.libraryFiles = this.libraryFiles.filter(lf => lf.metadata.path !== filePathToPOSIX(Path.join(metadataPath, `metadata.json`)))
-      }
-
-      return abmetadataGenerator.generate(this, metadataFilePath).then(async (success) => {
-        if (!success) {
-          Logger.error(`[LibraryItem] Failed saving abmetadata to "${metadataFilePath}"`)
-          return null
-        }
-        // Add metadata.abs to libraryFiles array if it is new
-        let metadataLibraryFile = this.libraryFiles.find(lf => lf.metadata.path === filePathToPOSIX(metadataFilePath))
-        if (storeMetadataWithItem) {
-          if (!metadataLibraryFile) {
-            metadataLibraryFile = new LibraryFile()
-            await metadataLibraryFile.setDataFromPath(metadataFilePath, `metadata.abs`)
-            this.libraryFiles.push(metadataLibraryFile)
-          } else {
-            const fileTimestamps = await getFileTimestampsWithIno(metadataFilePath)
-            if (fileTimestamps) {
-              metadataLibraryFile.metadata.mtimeMs = fileTimestamps.mtimeMs
-              metadataLibraryFile.metadata.ctimeMs = fileTimestamps.ctimeMs
-              metadataLibraryFile.metadata.size = fileTimestamps.size
-              metadataLibraryFile.ino = fileTimestamps.ino
-            }
-          }
-          const libraryItemDirTimestamps = await getFileTimestampsWithIno(this.path)
-          if (libraryItemDirTimestamps) {
-            this.mtimeMs = libraryItemDirTimestamps.mtimeMs
-            this.ctimeMs = libraryItemDirTimestamps.ctimeMs
-          }
-        }
-
-        Logger.debug(`[LibraryItem] Success saving abmetadata to "${metadataFilePath}"`)
-        return metadataLibraryFile
-      }).finally(() => {
-        this.isSavingMetadata = false
-      })
-    }
+      return metadataLibraryFile
+    }).catch((error) => {
+      Logger.error(`[LibraryItem] Failed to save json file at "${metadataFilePath}"`, error)
+      return null
+    }).finally(() => {
+      this.isSavingMetadata = false
+    })
   }

   removeLibraryFile(ino) {
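The refactor collapses the old metadata.abs/metadata.json dual path into a single JSON write (note the success log still says "abmetadata", a leftover string). A stripped-down sketch of the surviving control flow, under assumed shapes:

const fs = require('fs-extra')
const Path = require('path')

// Minimal sketch of the simplified saveMetadata() flow above: serialize the
// media object and write one metadata.json, resolving null on failure instead
// of throwing. Illustrative only; the real method also syncs libraryFiles.
async function saveMetadataJson(metadataPath, media) {
  const metadataFilePath = Path.join(metadataPath, 'metadata.json')
  try {
    await fs.writeFile(metadataFilePath, JSON.stringify(media.toJSONForMetadataFile(), null, 2))
    return metadataFilePath
  } catch (error) {
    console.error(`Failed to save json file at "${metadataFilePath}"`, error)
    return null
  }
}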
@@ -101,7 +101,6 @@ class Stream extends EventEmitter {
     return 'mpegts'
   }
   get segmentBasename() {
-    if (this.hlsSegmentType === 'fmp4') return 'output-%d.m4s'
     return 'output-%d.ts'
   }
   get segmentStartNumber() {
@@ -142,19 +141,21 @@ class Stream extends EventEmitter {

   async checkSegmentNumberRequest(segNum) {
     const segStartTime = segNum * this.segmentLength
-    if (this.startTime > segStartTime) {
-      Logger.warn(`[STREAM] Segment #${segNum} Request @${secondsToTimestamp(segStartTime)} is before start time (${secondsToTimestamp(this.startTime)}) - Reset Transcode`)
-      await this.reset(segStartTime - (this.segmentLength * 2))
+    if (this.segmentStartNumber > segNum) {
+      Logger.warn(`[STREAM] Segment #${segNum} Request is before starting segment number #${this.segmentStartNumber} - Reset Transcode`)
+      await this.reset(segStartTime - (this.segmentLength * 5))
       return segStartTime
     } else if (this.isTranscodeComplete) {
       return false
     }

-    const distanceFromFurthestSegment = segNum - this.furthestSegmentCreated
-    if (distanceFromFurthestSegment > 10) {
-      Logger.info(`Segment #${segNum} requested is ${distanceFromFurthestSegment} segments from latest (${secondsToTimestamp(segStartTime)}) - Reset Transcode`)
-      await this.reset(segStartTime - (this.segmentLength * 2))
-      return segStartTime
+    if (this.furthestSegmentCreated) {
+      const distanceFromFurthestSegment = segNum - this.furthestSegmentCreated
+      if (distanceFromFurthestSegment > 10) {
+        Logger.info(`Segment #${segNum} requested is ${distanceFromFurthestSegment} segments from latest (${secondsToTimestamp(segStartTime)}) - Reset Transcode`)
+        await this.reset(segStartTime - (this.segmentLength * 5))
+        return segStartTime
+      }
     }

     return false
@@ -171,7 +172,7 @@ class Stream extends EventEmitter {
     var files = await fs.readdir(this.streamPath)
     files.forEach((file) => {
       var extname = Path.extname(file)
-      if (extname === '.ts' || extname === '.m4s') {
+      if (extname === '.ts') {
         var basename = Path.basename(file, extname)
         var num_part = basename.split('-')[1]
         var part_num = Number(num_part)
@@ -251,6 +252,7 @@ class Stream extends EventEmitter {
     Logger.info(`[STREAM] START STREAM - Num Segments: ${this.numSegments}`)

     this.ffmpeg = Ffmpeg()
+    this.furthestSegmentCreated = 0

     var adjustedStartTime = Math.max(this.startTime - this.maxSeekBackTime, 0)
     var trackStartTime = await writeConcatFile(this.tracks, this.concatFilesPath, adjustedStartTime)
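A worked example of the segment arithmetic in checkSegmentNumberRequest, with assumed values:

// Illustrative values only. A request for HLS segment N maps to a start
// time of N * segmentLength seconds.
const segmentLength = 6 // seconds per segment, value assumed
const segNum = 40
const segStartTime = segNum * segmentLength // 240s

// If the active transcode began at a later segment (e.g. #50 => 300s), the
// requested segment was never produced, so the stream resets the transcode
// five segments before the request to rebuild a small buffer:
const segmentStartNumber = 50
if (segmentStartNumber > segNum) {
  const resetTime = segStartTime - segmentLength * 5 // 210s
  console.log(`reset transcode at ${resetTime}s`)
}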
@@ -2,19 +2,30 @@ const uuidv4 = require("uuid").v4

 class Task {
   constructor() {
+    /** @type {string} */
     this.id = null
+    /** @type {string} */
     this.action = null // e.g. embed-metadata, encode-m4b, etc
+    /** @type {Object} custom data */
     this.data = null // additional info for the action like libraryItemId

+    /** @type {string} */
     this.title = null
+    /** @type {string} */
     this.description = null
+    /** @type {string} */
     this.error = null
-    this.showSuccess = false // If true client side should keep the task visible after success
+    /** @type {boolean} client should keep the task visible after success */
+    this.showSuccess = false

+    /** @type {boolean} */
     this.isFailed = false
+    /** @type {boolean} */
     this.isFinished = false

+    /** @type {number} */
     this.startedAt = null
+    /** @type {number} */
     this.finishedAt = null
   }

@@ -34,6 +45,15 @@ class Task {
     }
   }

+  /**
+   * Set initial task data
+   *
+   * @param {string} action
+   * @param {string} title
+   * @param {string} description
+   * @param {boolean} showSuccess
+   * @param {Object} [data]
+   */
   setData(action, title, description, showSuccess, data = {}) {
     this.id = uuidv4()
     this.action = action
@@ -44,6 +64,11 @@ class Task {
     this.startedAt = Date.now()
   }

+  /**
+   * Set task as failed
+   *
+   * @param {string} message error message
+   */
   setFailed(message) {
     this.error = message
     this.isFailed = true
@@ -51,6 +76,11 @@ class Task {
     this.setFinished()
   }

+  /**
+   * Set task as finished
+   *
+   * @param {string} [newDescription] update description
+   */
   setFinished(newDescription = null) {
     if (newDescription) {
       this.description = newDescription
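The lifecycle these JSDoc additions document: setData initializes a task, then exactly one of setFailed or setFinished completes it. A hypothetical usage sketch (require path assumed):

// Hypothetical usage of the Task lifecycle documented above.
const Task = require('./server/objects/Task') // path assumed

const task = new Task()
task.setData('encode-m4b', 'Encoding M4B', 'Encoding "Some Book" to m4b', false, { libraryItemId: 'li_abc123' })

try {
  // ... do the work ...
  task.setFinished('Encode complete')
} catch (error) {
  task.setFailed(error.message) // also marks the task finished via setFinished()
}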
@@ -1,7 +1,5 @@
-const uuidv4 = require("uuid").v4
-const Path = require('path')
 const Logger = require('../../Logger')
-const { cleanStringForSearch, areEquivalent, copyValue } = require('../../utils/index')
+const { areEquivalent, copyValue } = require('../../utils/index')
 const AudioFile = require('../files/AudioFile')
 const AudioTrack = require('../files/AudioTrack')

@@ -20,6 +18,7 @@ class PodcastEpisode {
     this.subtitle = null
     this.description = null
     this.enclosure = null
+    this.guid = null
     this.pubDate = null
     this.chapters = []

@@ -46,14 +45,17 @@ class PodcastEpisode {
     this.subtitle = episode.subtitle
     this.description = episode.description
     this.enclosure = episode.enclosure ? { ...episode.enclosure } : null
+    this.guid = episode.guid || null
     this.pubDate = episode.pubDate
     this.chapters = episode.chapters?.map(ch => ({ ...ch })) || []
-    this.audioFile = new AudioFile(episode.audioFile)
+    this.audioFile = episode.audioFile ? new AudioFile(episode.audioFile) : null
     this.publishedAt = episode.publishedAt
     this.addedAt = episode.addedAt
     this.updatedAt = episode.updatedAt

-    this.audioFile.index = 1 // Only 1 audio file per episode
+    if (this.audioFile) {
+      this.audioFile.index = 1 // Only 1 audio file per episode
+    }
   }

   toJSON() {
@@ -70,9 +72,10 @@ class PodcastEpisode {
       subtitle: this.subtitle,
       description: this.description,
       enclosure: this.enclosure ? { ...this.enclosure } : null,
+      guid: this.guid,
       pubDate: this.pubDate,
       chapters: this.chapters.map(ch => ({ ...ch })),
-      audioFile: this.audioFile.toJSON(),
+      audioFile: this.audioFile?.toJSON() || null,
       publishedAt: this.publishedAt,
       addedAt: this.addedAt,
       updatedAt: this.updatedAt
@@ -93,10 +96,11 @@ class PodcastEpisode {
       subtitle: this.subtitle,
       description: this.description,
       enclosure: this.enclosure ? { ...this.enclosure } : null,
+      guid: this.guid,
       pubDate: this.pubDate,
       chapters: this.chapters.map(ch => ({ ...ch })),
-      audioFile: this.audioFile.toJSON(),
-      audioTrack: this.audioTrack.toJSON(),
+      audioFile: this.audioFile?.toJSON() || null,
+      audioTrack: this.audioTrack?.toJSON() || null,
       publishedAt: this.publishedAt,
       addedAt: this.addedAt,
       updatedAt: this.updatedAt,
@@ -106,6 +110,7 @@ class PodcastEpisode {
   }

   get audioTrack() {
+    if (!this.audioFile) return null
     const audioTrack = new AudioTrack()
     audioTrack.setData(this.libraryItemId, this.audioFile, 0)
     return audioTrack
@@ -114,9 +119,9 @@ class PodcastEpisode {
     return [this.audioTrack]
   }
   get duration() {
-    return this.audioFile.duration
+    return this.audioFile?.duration || 0
   }
-  get size() { return this.audioFile.metadata.size }
+  get size() { return this.audioFile?.metadata.size || 0 }
   get enclosureUrl() {
     return this.enclosure?.url || null
   }
@@ -133,6 +138,7 @@ class PodcastEpisode {
     this.pubDate = data.pubDate || ''
     this.description = data.description || ''
     this.enclosure = data.enclosure ? { ...data.enclosure } : null
+    this.guid = data.guid || null
     this.season = data.season || ''
     this.episode = data.episode || ''
     this.episodeType = data.episodeType || 'full'
@@ -141,19 +147,6 @@ class PodcastEpisode {
     this.updatedAt = Date.now()
   }

-  setDataFromAudioFile(audioFile, index) {
-    this.id = uuidv4()
-    this.audioFile = audioFile
-    this.title = Path.basename(audioFile.metadata.filename, Path.extname(audioFile.metadata.filename))
-    this.index = index
-
-    this.setDataFromAudioMetaTags(audioFile.metaTags, true)
-
-    this.chapters = audioFile.chapters?.map((c) => ({ ...c }))
-    this.addedAt = Date.now()
-    this.updatedAt = Date.now()
-  }
-
   update(payload) {
     let hasUpdates = false
     for (const key in this.toJSON()) {
@@ -187,80 +180,5 @@ class PodcastEpisode {
     if (!this.enclosure || !this.enclosure.url) return false
     return this.enclosure.url == url
   }
-
-  searchQuery(query) {
-    return cleanStringForSearch(this.title).includes(query)
-  }
-
-  setDataFromAudioMetaTags(audioFileMetaTags, overrideExistingDetails = false) {
-    if (!audioFileMetaTags) return false
-
-    const MetadataMapArray = [
-      {
-        tag: 'tagComment',
-        altTag: 'tagSubtitle',
-        key: 'description'
-      },
-      {
-        tag: 'tagSubtitle',
-        key: 'subtitle'
-      },
-      {
-        tag: 'tagDate',
-        key: 'pubDate'
-      },
-      {
-        tag: 'tagDisc',
-        key: 'season',
-      },
-      {
-        tag: 'tagTrack',
-        altTag: 'tagSeriesPart',
-        key: 'episode'
-      },
-      {
-        tag: 'tagTitle',
-        key: 'title'
-      },
-      {
-        tag: 'tagEpisodeType',
-        key: 'episodeType'
-      }
-    ]
-
-    MetadataMapArray.forEach((mapping) => {
-      let value = audioFileMetaTags[mapping.tag]
-      let tagToUse = mapping.tag
-      if (!value && mapping.altTag) {
-        tagToUse = mapping.altTag
-        value = audioFileMetaTags[mapping.altTag]
-      }
-
-      if (value && typeof value === 'string') {
-        value = value.trim() // Trim whitespace
-
-        if (mapping.key === 'pubDate' && (!this.pubDate || overrideExistingDetails)) {
-          const pubJsDate = new Date(value)
-          if (pubJsDate && !isNaN(pubJsDate)) {
-            this.publishedAt = pubJsDate.valueOf()
-            this.pubDate = value
-            Logger.debug(`[PodcastEpisode] Mapping metadata to key ${tagToUse} => ${mapping.key}: ${this[mapping.key]}`)
-          } else {
-            Logger.warn(`[PodcastEpisode] Mapping pubDate with tag ${tagToUse} has invalid date "${value}"`)
-          }
-        } else if (mapping.key === 'episodeType' && (!this.episodeType || overrideExistingDetails)) {
-          if (['full', 'trailer', 'bonus'].includes(value)) {
-            this.episodeType = value
-            Logger.debug(`[PodcastEpisode] Mapping metadata to key ${tagToUse} => ${mapping.key}: ${this[mapping.key]}`)
-          } else {
-            Logger.warn(`[PodcastEpisode] Mapping episodeType with invalid value "${value}". Must be one of [full, trailer, bonus].`)
-          }
-        } else if (!this[mapping.key] || overrideExistingDetails) {
-          this[mapping.key] = value
-          Logger.debug(`[PodcastEpisode] Mapping metadata to key ${tagToUse} => ${mapping.key}: ${this[mapping.key]}`)
-        }
-      }
-    })
-  }
 }
 module.exports = PodcastEpisode
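Several of these hunks apply the same defensive pattern: `audioFile` may now legitimately be null, so every accessor degrades gracefully instead of throwing. The pattern in isolation, with invented shapes:

// The null-safe accessor pattern used above, in isolation (illustrative shapes):
class Episode {
  constructor(audioFile = null) {
    this.audioFile = audioFile // may legitimately be null now
  }
  get duration() {
    return this.audioFile?.duration || 0 // optional chaining instead of a TypeError
  }
  get size() {
    return this.audioFile?.metadata.size || 0
  }
}

console.log(new Episode().duration) // 0, no crash
console.log(new Episode({ duration: 3600, metadata: { size: 1024 } }).size) // 1024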
@@ -1,11 +1,7 @@
-const Path = require('path')
 const Logger = require('../../Logger')
 const BookMetadata = require('../metadata/BookMetadata')
-const { areEquivalent, copyValue, cleanStringForSearch } = require('../../utils/index')
-const { parseOpfMetadataXML } = require('../../utils/parsers/parseOpfMetadata')
-const { parseOverdriveMediaMarkersAsChapters } = require('../../utils/parsers/parseOverdriveMediaMarkers')
-const abmetadataGenerator = require('../../utils/generators/abmetadataGenerator')
-const { readTextFile, filePathToPOSIX } = require('../../utils/fileUtils')
+const { areEquivalent, copyValue } = require('../../utils/index')
+const { filePathToPOSIX } = require('../../utils/fileUtils')
 const AudioFile = require('../files/AudioFile')
 const AudioTrack = require('../files/AudioTrack')
 const EBookFile = require('../files/EBookFile')
@@ -98,7 +94,7 @@ class Book {
     return {
       tags: [...this.tags],
       chapters: this.chapters.map(c => ({ ...c })),
-      metadata: this.metadata.toJSONForMetadataFile()
+      ...this.metadata.toJSONForMetadataFile()
     }
   }

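The `toJSONForMetadataFile()` change flattens the payload: metadata fields now sit at the top level of metadata.json next to `tags` and `chapters`, instead of under a nested `metadata` key. Roughly, with invented values:

// Shape change in metadata.json, illustrated with invented values:
// before: { tags: [...], chapters: [...], metadata: { title: 'Some Book', ... } }
// after:  { tags: [...], chapters: [...], title: 'Some Book', ... }
const metadata = { title: 'Some Book', authors: [{ name: 'Jane Doe' }] }

const before = { tags: ['fiction'], chapters: [], metadata }   // nested
const after = { tags: ['fiction'], chapters: [], ...metadata } // spread flattens it

console.log(Object.keys(after)) // ['tags', 'chapters', 'title', 'authors']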
@@ -113,23 +109,12 @@ class Book {
   get hasMediaEntities() {
     return !!this.tracks.length || this.ebookFile
   }
-  get shouldSearchForCover() {
-    if (this.coverPath) return false
-    if (!this.lastCoverSearch || this.metadata.coverSearchQuery !== this.lastCoverSearchQuery) return true
-    return (Date.now() - this.lastCoverSearch) > 1000 * 60 * 60 * 24 * 7 // 7 day
-  }
-  get hasEmbeddedCoverArt() {
-    return this.audioFiles.some(af => af.embeddedCoverArt)
-  }
   get invalidAudioFiles() {
     return this.audioFiles.filter(af => af.invalid)
   }
   get includedAudioFiles() {
     return this.audioFiles.filter(af => !af.exclude && !af.invalid)
   }
-  get hasIssues() {
-    return this.missingParts.length || this.invalidAudioFiles.length
-  }
   get tracks() {
     let startOffset = 0
     return this.includedAudioFiles.map((af) => {
@@ -228,159 +213,6 @@ class Book {
     return null
   }

-  updateLastCoverSearch(coverWasFound) {
-    this.lastCoverSearch = coverWasFound ? null : Date.now()
-    this.lastCoverSearchQuery = coverWasFound ? null : this.metadata.coverSearchQuery
-  }
-
-  // Audio file metadata tags map to book details (will not overwrite)
-  setMetadataFromAudioFile(overrideExistingDetails = false) {
-    if (!this.audioFiles.length) return false
-    var audioFile = this.audioFiles[0]
-    if (!audioFile.metaTags) return false
-    return this.metadata.setDataFromAudioMetaTags(audioFile.metaTags, overrideExistingDetails)
-  }
-
-  setData(mediaPayload) {
-    this.metadata = new BookMetadata()
-    if (mediaPayload.metadata) {
-      this.metadata.setData(mediaPayload.metadata)
-    }
-  }
-
-  // Look for desc.txt, reader.txt, metadata.abs and opf file then update details if found
-  async syncMetadataFiles(textMetadataFiles, opfMetadataOverrideDetails) {
-    let metadataUpdatePayload = {}
-    let hasUpdated = false
-
-    const descTxt = textMetadataFiles.find(lf => lf.metadata.filename === 'desc.txt')
-    if (descTxt) {
-      const descriptionText = await readTextFile(descTxt.metadata.path)
-      if (descriptionText) {
-        Logger.debug(`[Book] "${this.metadata.title}" found desc.txt updating description with "${descriptionText.slice(0, 20)}..."`)
-        metadataUpdatePayload.description = descriptionText
-      }
-    }
-    const readerTxt = textMetadataFiles.find(lf => lf.metadata.filename === 'reader.txt')
-    if (readerTxt) {
-      const narratorText = await readTextFile(readerTxt.metadata.path)
-      if (narratorText) {
-        Logger.debug(`[Book] "${this.metadata.title}" found reader.txt updating narrator with "${narratorText}"`)
-        metadataUpdatePayload.narrators = this.metadata.parseNarratorsTag(narratorText)
-      }
-    }
-
-    const metadataIsJSON = global.ServerSettings.metadataFileFormat === 'json'
-    const metadataAbs = textMetadataFiles.find(lf => lf.metadata.filename === 'metadata.abs')
-    const metadataJson = textMetadataFiles.find(lf => lf.metadata.filename === 'metadata.json')
-
-    const metadataFile = metadataIsJSON ? metadataJson : metadataAbs
-    if (metadataFile) {
-      Logger.debug(`[Book] Found ${metadataFile.metadata.filename} file for "${this.metadata.title}"`)
-      const metadataText = await readTextFile(metadataFile.metadata.path)
-      const abmetadataUpdates = abmetadataGenerator.parseAndCheckForUpdates(metadataText, this, 'book', metadataIsJSON)
-      if (abmetadataUpdates && Object.keys(abmetadataUpdates).length) {
-        Logger.debug(`[Book] "${this.metadata.title}" changes found in metadata.abs file`, abmetadataUpdates)
-
-        if (abmetadataUpdates.tags) { // Set media tags if updated
-          this.tags = abmetadataUpdates.tags
-          hasUpdated = true
-        }
-        if (abmetadataUpdates.chapters) { // Set chapters if updated
-          this.chapters = abmetadataUpdates.chapters
-          hasUpdated = true
-        }
-        if (abmetadataUpdates.metadata) {
-          metadataUpdatePayload = {
-            ...metadataUpdatePayload,
-            ...abmetadataUpdates.metadata
-          }
-        }
-      }
-    } else if (metadataAbs || metadataJson) { // Has different metadata file format so mark as updated
-      Logger.debug(`[Book] Found different format metadata file ${(metadataAbs || metadataJson).metadata.filename}, expecting .${global.ServerSettings.metadataFileFormat} for "${this.metadata.title}"`)
-      hasUpdated = true
-    }
-
-    const metadataOpf = textMetadataFiles.find(lf => lf.isOPFFile || lf.metadata.filename === 'metadata.xml')
-    if (metadataOpf) {
-      const xmlText = await readTextFile(metadataOpf.metadata.path)
-      if (xmlText) {
-        const opfMetadata = await parseOpfMetadataXML(xmlText)
-        if (opfMetadata) {
-          for (const key in opfMetadata) {
-
-            if (key === 'tags') { // Add tags only if tags are empty
-              if (opfMetadata.tags.length && (!this.tags.length || opfMetadataOverrideDetails)) {
-                this.tags = opfMetadata.tags
-                hasUpdated = true
-              }
-            } else if (key === 'genres') { // Add genres only if genres are empty
-              if (opfMetadata.genres.length && (!this.metadata.genres.length || opfMetadataOverrideDetails)) {
-                metadataUpdatePayload[key] = opfMetadata.genres
-              }
-            } else if (key === 'authors') {
-              if (opfMetadata.authors && opfMetadata.authors.length && (!this.metadata.authors.length || opfMetadataOverrideDetails)) {
-                metadataUpdatePayload.authors = opfMetadata.authors.map(authorName => {
-                  return {
-                    id: `new-${Math.floor(Math.random() * 1000000)}`,
-                    name: authorName
-                  }
-                })
-              }
-            } else if (key === 'narrators') {
-              if (opfMetadata.narrators?.length && (!this.metadata.narrators.length || opfMetadataOverrideDetails)) {
-                metadataUpdatePayload.narrators = opfMetadata.narrators
-              }
-            } else if (key === 'series') {
-              if (opfMetadata.series && (!this.metadata.series.length || opfMetadataOverrideDetails)) {
-                metadataUpdatePayload.series = this.metadata.parseSeriesTag(opfMetadata.series, opfMetadata.sequence)
-              }
-            } else if (opfMetadata[key] && ((!this.metadata[key] && !metadataUpdatePayload[key]) || opfMetadataOverrideDetails)) {
-              metadataUpdatePayload[key] = opfMetadata[key]
-            }
-          }
-        }
-      }
-    }
-
-    if (Object.keys(metadataUpdatePayload).length) {
-      return this.metadata.update(metadataUpdatePayload) || hasUpdated
-    }
-    return hasUpdated
-  }
-
-  searchQuery(query) {
-    const payload = {
-      tags: this.tags.filter(t => cleanStringForSearch(t).includes(query)),
-      series: this.metadata.searchSeries(query),
-      authors: this.metadata.searchAuthors(query),
-      narrators: this.metadata.searchNarrators(query),
-      matchKey: null,
-      matchText: null
-    }
-    const metadataMatch = this.metadata.searchQuery(query)
-    if (metadataMatch) {
-      payload.matchKey = metadataMatch.matchKey
-      payload.matchText = metadataMatch.matchText
-    } else {
-      if (payload.authors.length) {
-        payload.matchKey = 'authors'
-        payload.matchText = this.metadata.authorName
-      } else if (payload.series.length) {
-        payload.matchKey = 'series'
-        payload.matchText = this.metadata.seriesName
-      } else if (payload.tags.length) {
-        payload.matchKey = 'tags'
-        payload.matchText = this.tags.join(', ')
-      } else if (payload.narrators.length) {
-        payload.matchKey = 'narrators'
-        payload.matchText = this.metadata.narratorName
-      }
-    }
-    return payload
-  }

   /**
    * Set the EBookFile from a LibraryFile
    * If null then ebookFile will be removed from the book
@@ -426,113 +258,6 @@ class Book {
     Logger.debug(`[Book] Tracks being rebuilt...!`)
     this.audioFiles.sort((a, b) => a.index - b.index)
     this.missingParts = []
-    this.setChapters()
-    this.checkUpdateMissingTracks()
   }

-  checkUpdateMissingTracks() {
-    var currMissingParts = (this.missingParts || []).join(',') || ''
-
-    var current_index = 1
-    var missingParts = []
-
-    for (let i = 0; i < this.tracks.length; i++) {
-      var _track = this.tracks[i]
-      if (_track.index > current_index) {
-        var num_parts_missing = _track.index - current_index
-        for (let x = 0; x < num_parts_missing && x < 9999; x++) {
-          missingParts.push(current_index + x)
-        }
-      }
-      current_index = _track.index + 1
-    }
-
-    this.missingParts = missingParts
-
-    var newMissingParts = (this.missingParts || []).join(',') || ''
-    var wasUpdated = newMissingParts !== currMissingParts
-    if (wasUpdated && this.missingParts.length) {
-      Logger.info(`[Audiobook] "${this.metadata.title}" has ${missingParts.length} missing parts`)
-    }
-
-    return wasUpdated
-  }
-
-  setChapters() {
-    const preferOverdriveMediaMarker = !!global.ServerSettings.scannerPreferOverdriveMediaMarker
-
-    // If 1 audio file without chapters, then no chapters will be set
-    const includedAudioFiles = this.audioFiles.filter(af => !af.exclude)
-    if (!includedAudioFiles.length) return
-
-    // If overdrive media markers are present and preferred, use those instead
-    if (preferOverdriveMediaMarker) {
-      const overdriveChapters = parseOverdriveMediaMarkersAsChapters(includedAudioFiles)
-      if (overdriveChapters) {
-        Logger.info('[Book] Overdrive Media Markers and preference found! Using these for chapter definitions')
-        this.chapters = overdriveChapters
-        return
-      }
-    }
-
-    // If first audio file has embedded chapters then use embedded chapters
-    if (includedAudioFiles[0].chapters?.length) {
-      // If all files chapters are the same, then only make chapters for the first file
-      if (
-        includedAudioFiles.length === 1 ||
-        includedAudioFiles.length > 1 &&
-        includedAudioFiles[0].chapters.length === includedAudioFiles[1].chapters?.length &&
-        includedAudioFiles[0].chapters.every((c, i) => c.title === includedAudioFiles[1].chapters[i].title)
-      ) {
-        Logger.debug(`[Book] setChapters: Using embedded chapters in first audio file ${includedAudioFiles[0].metadata?.path}`)
-        this.chapters = includedAudioFiles[0].chapters.map((c) => ({ ...c }))
-      } else {
-        Logger.debug(`[Book] setChapters: Using embedded chapters from all audio files ${includedAudioFiles[0].metadata?.path}`)
-        this.chapters = []
-        let currChapterId = 0
-        let currStartTime = 0
-
-        includedAudioFiles.forEach((file) => {
-          if (file.duration) {
-            const chapters = file.chapters?.map((c) => ({
-              ...c,
-              id: c.id + currChapterId,
-              start: c.start + currStartTime,
-              end: c.end + currStartTime,
-            })) ?? []
-            this.chapters = this.chapters.concat(chapters)
-
-            currChapterId += file.chapters?.length ?? 0
-            currStartTime += file.duration
-          }
-        })
-      }
-    } else if (includedAudioFiles.length > 1) {
-      const preferAudioMetadata = !!global.ServerSettings.scannerPreferAudioMetadata
-
-      // Build chapters from audio files
-      this.chapters = []
-      let currChapterId = 0
-      let currStartTime = 0
-      includedAudioFiles.forEach((file) => {
-        if (file.duration) {
-          let title = file.metadata.filename ? Path.basename(file.metadata.filename, Path.extname(file.metadata.filename)) : `Chapter ${currChapterId}`
-
-          // When prefer audio metadata server setting is set then use ID3 title tag as long as it is not the same as the book title
-          if (preferAudioMetadata && file.metaTags?.tagTitle && file.metaTags?.tagTitle !== this.metadata.title) {
-            title = file.metaTags.tagTitle
-          }
-
-          this.chapters.push({
-            id: currChapterId++,
-            start: currStartTime,
-            end: currStartTime + file.duration,
-            title
-          })
-          currStartTime += file.duration
-        }
-      })
-    }
-  }

   // Only checks container format
|
|||
get hasMediaEntities() {
|
||||
return !!this.audioFile
|
||||
}
|
||||
get shouldSearchForCover() {
|
||||
return false
|
||||
}
|
||||
get hasEmbeddedCoverArt() {
|
||||
return this.audioFile.embeddedCoverArt
|
||||
}
|
||||
get hasIssues() {
|
||||
return false
|
||||
}
|
||||
get duration() {
|
||||
return this.audioFile.duration || 0
|
||||
}
|
||||
|
|
@ -134,20 +125,6 @@ class Music {
|
|||
this.audioFile = audioFile
|
||||
}
|
||||
|
||||
setMetadataFromAudioFile(overrideExistingDetails = false) {
|
||||
if (!this.audioFile) return false
|
||||
if (!this.audioFile.metaTags) return false
|
||||
return this.metadata.setDataFromAudioMetaTags(this.audioFile.metaTags, overrideExistingDetails)
|
||||
}
|
||||
|
||||
syncMetadataFiles(textMetadataFiles, opfMetadataOverrideDetails) {
|
||||
return false
|
||||
}
|
||||
|
||||
searchQuery(query) {
|
||||
return {}
|
||||
}
|
||||
|
||||
// Only checks container format
|
||||
checkCanDirectPlay(payload) {
|
||||
return true
|
||||
|
|
|
|||
|
|
@@ -1,9 +1,8 @@
 const Logger = require('../../Logger')
 const PodcastEpisode = require('../entities/PodcastEpisode')
 const PodcastMetadata = require('../metadata/PodcastMetadata')
-const { areEquivalent, copyValue, cleanStringForSearch } = require('../../utils/index')
-const abmetadataGenerator = require('../../utils/generators/abmetadataGenerator')
-const { readTextFile, filePathToPOSIX } = require('../../utils/fileUtils')
+const { areEquivalent, copyValue } = require('../../utils/index')
+const { filePathToPOSIX } = require('../../utils/fileUtils')

 class Podcast {
   constructor(podcast) {
@@ -98,7 +97,19 @@ class Podcast {
   toJSONForMetadataFile() {
     return {
       tags: [...this.tags],
-      metadata: this.metadata.toJSON()
+      title: this.metadata.title,
+      author: this.metadata.author,
+      description: this.metadata.description,
+      releaseDate: this.metadata.releaseDate,
+      genres: [...this.metadata.genres],
+      feedURL: this.metadata.feedUrl,
+      imageURL: this.metadata.imageUrl,
+      itunesPageURL: this.metadata.itunesPageUrl,
+      itunesId: this.metadata.itunesId,
+      itunesArtistId: this.metadata.itunesArtistId,
+      explicit: this.metadata.explicit,
+      language: this.metadata.language,
+      podcastType: this.metadata.type
     }
   }

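Podcast's toJSONForMetadataFile now enumerates each field explicitly rather than dumping the whole metadata object, which pins the metadata.json schema independently of internal property names (note the deliberate renames, e.g. feedUrl becomes feedURL). A reduced sketch with invented values:

// Reduced sketch of the explicit field mapping (values invented):
const metadata = { title: 'My Show', feedUrl: 'https://example.com/rss', type: 'episodic' }

const fileJson = {
  title: metadata.title,
  feedURL: metadata.feedUrl,  // property renamed on the way out
  podcastType: metadata.type  // internal "type" becomes "podcastType" in the file
}
// Enumerating fields here keeps the on-disk format stable even if
// the in-memory model changes later.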
@@ -110,15 +121,6 @@ class Podcast {
   get hasMediaEntities() {
     return !!this.episodes.length
   }
-  get shouldSearchForCover() {
-    return false
-  }
-  get hasEmbeddedCoverArt() {
-    return this.episodes.some(ep => ep.audioFile.embeddedCoverArt)
-  }
-  get hasIssues() {
-    return false
-  }
   get duration() {
     let total = 0
     this.episodes.forEach((ep) => total += ep.duration)
@@ -187,10 +189,6 @@ class Podcast {
     return null
   }

-  findEpisodeWithInode(inode) {
-    return this.episodes.find(ep => ep.audioFile.ino === inode)
-  }
-
   setData(mediaData) {
     this.metadata = new PodcastMetadata()
     if (mediaData.metadata) {
@@ -203,62 +201,6 @@ class Podcast {
     this.lastEpisodeCheck = Date.now() // Makes sure new episodes are after this
   }

-  async syncMetadataFiles(textMetadataFiles, opfMetadataOverrideDetails) {
-    let metadataUpdatePayload = {}
-    let tagsUpdated = false
-
-    const metadataAbs = textMetadataFiles.find(lf => lf.metadata.filename === 'metadata.abs' || lf.metadata.filename === 'metadata.json')
-    if (metadataAbs) {
-      const isJSON = metadataAbs.metadata.filename === 'metadata.json'
-      const metadataText = await readTextFile(metadataAbs.metadata.path)
-      const abmetadataUpdates = abmetadataGenerator.parseAndCheckForUpdates(metadataText, this, 'podcast', isJSON)
-      if (abmetadataUpdates && Object.keys(abmetadataUpdates).length) {
-        Logger.debug(`[Podcast] "${this.metadata.title}" changes found in metadata.abs file`, abmetadataUpdates)
-
-        if (abmetadataUpdates.tags) { // Set media tags if updated
-          this.tags = abmetadataUpdates.tags
-          tagsUpdated = true
-        }
-        if (abmetadataUpdates.metadata) {
-          metadataUpdatePayload = {
-            ...metadataUpdatePayload,
-            ...abmetadataUpdates.metadata
-          }
-        }
-      }
-    }
-
-    if (Object.keys(metadataUpdatePayload).length) {
-      return this.metadata.update(metadataUpdatePayload) || tagsUpdated
-    }
-    return tagsUpdated
-  }
-
-  searchEpisodes(query) {
-    return this.episodes.filter(ep => ep.searchQuery(query))
-  }
-
-  searchQuery(query) {
-    const payload = {
-      tags: this.tags.filter(t => cleanStringForSearch(t).includes(query)),
-      matchKey: null,
-      matchText: null
-    }
-    const metadataMatch = this.metadata.searchQuery(query)
-    if (metadataMatch) {
-      payload.matchKey = metadataMatch.matchKey
-      payload.matchText = metadataMatch.matchText
-    } else {
-      const matchingEpisodes = this.searchEpisodes(query)
-      if (matchingEpisodes.length) {
-        payload.matchKey = 'episode'
-        payload.matchText = matchingEpisodes[0].title
-      }
-    }
-
-    return payload
-  }

   checkHasEpisode(episodeId) {
     return this.episodes.some(ep => ep.id === episodeId)
   }
@@ -325,14 +267,6 @@ class Podcast {
     return this.episodes.find(ep => ep.id == episodeId)
   }

-  // Audio file metadata tags map to podcast details
-  setMetadataFromAudioFile(overrideExistingDetails = false) {
-    if (!this.episodes.length) return false
-    const audioFile = this.episodes[0].audioFile
-    if (!audioFile?.metaTags) return false
-    return this.metadata.setDataFromAudioMetaTags(audioFile.metaTags, overrideExistingDetails)
-  }
-
   getChapters(episodeId) {
     return this.getEpisode(episodeId)?.chapters?.map(ch => ({ ...ch })) || []
   }
@@ -69,15 +69,6 @@ class Video {
   get hasMediaEntities() {
     return true
   }
-  get shouldSearchForCover() {
-    return false
-  }
-  get hasEmbeddedCoverArt() {
-    return false
-  }
-  get hasIssues() {
-    return false
-  }
   get duration() {
     return 0
   }
@ -1,5 +1,5 @@
|
|||
const Logger = require('../../Logger')
|
||||
const { areEquivalent, copyValue, cleanStringForSearch, getTitleIgnorePrefix, getTitlePrefixAtEnd } = require('../../utils/index')
|
||||
const { areEquivalent, copyValue, getTitleIgnorePrefix, getTitlePrefixAtEnd } = require('../../utils/index')
|
||||
const parseNameString = require('../../utils/parsers/parseNameString')
|
||||
class BookMetadata {
|
||||
constructor(metadata) {
|
||||
|
|
@ -144,20 +144,6 @@ class BookMetadata {
|
|||
return `${se.name} #${se.sequence}`
|
||||
}).join(', ')
|
||||
}
|
||||
get seriesNameIgnorePrefix() {
|
||||
if (!this.series.length) return ''
|
||||
return this.series.map(se => {
|
||||
if (!se.sequence) return getTitleIgnorePrefix(se.name)
|
||||
return `${getTitleIgnorePrefix(se.name)} #${se.sequence}`
|
||||
}).join(', ')
|
||||
}
|
||||
get seriesNamePrefixAtEnd() {
|
||||
if (!this.series.length) return ''
|
||||
return this.series.map(se => {
|
||||
if (!se.sequence) return getTitlePrefixAtEnd(se.name)
|
||||
return `${getTitlePrefixAtEnd(se.name)} #${se.sequence}`
|
||||
}).join(', ')
|
||||
}
|
||||
get firstSeriesName() {
|
||||
if (!this.series.length) return ''
|
||||
return this.series[0].name
|
||||
|
|
@ -169,36 +155,15 @@ class BookMetadata {
|
|||
get narratorName() {
|
||||
return this.narrators.join(', ')
|
||||
}
|
||||
get coverSearchQuery() {
|
||||
if (!this.authorName) return this.title
|
||||
return this.title + '&' + this.authorName
|
||||
}
|
||||
|
||||
hasAuthor(id) {
|
||||
return !!this.authors.find(au => au.id == id)
|
||||
}
|
||||
hasSeries(seriesId) {
|
||||
return !!this.series.find(se => se.id == seriesId)
|
||||
}
|
||||
hasNarrator(narratorName) {
|
||||
return this.narrators.includes(narratorName)
|
||||
}
|
||||
getSeries(seriesId) {
|
||||
return this.series.find(se => se.id == seriesId)
|
||||
}
|
||||
getFirstSeries() {
|
||||
return this.series.length ? this.series[0] : null
|
||||
}
|
||||
getSeriesSequence(seriesId) {
|
||||
const series = this.series.find(se => se.id == seriesId)
|
||||
if (!series) return null
|
||||
return series.sequence || ''
|
||||
}
|
||||
getSeriesSortTitle(series) {
|
||||
if (!series) return ''
|
||||
if (!series.sequence) return series.name
|
||||
return `${series.name} #${series.sequence}`
|
||||
}
|
||||
|
||||
update(payload) {
|
||||
const json = this.toJSON()
|
||||
|
|
@ -231,205 +196,5 @@ class BookMetadata {
|
|||
name: newAuthor.name
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Update narrator name if narrator is in book
|
||||
* @param {String} oldNarratorName - Narrator name to get updated
|
||||
* @param {String} newNarratorName - Updated narrator name
|
||||
* @return {Boolean} True if narrator was updated
|
||||
*/
|
||||
updateNarrator(oldNarratorName, newNarratorName) {
|
||||
if (!this.hasNarrator(oldNarratorName)) return false
|
||||
this.narrators = this.narrators.filter(n => n !== oldNarratorName)
|
||||
if (newNarratorName && !this.hasNarrator(newNarratorName)) {
|
||||
this.narrators.push(newNarratorName)
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove narrator name if narrator is in book
|
||||
* @param {String} narratorName - Narrator name to remove
|
||||
* @return {Boolean} True if narrator was updated
|
||||
*/
|
||||
removeNarrator(narratorName) {
|
||||
if (!this.hasNarrator(narratorName)) return false
|
||||
this.narrators = this.narrators.filter(n => n !== narratorName)
|
||||
return true
|
||||
}
|
||||
|
||||
setData(scanMediaData = {}) {
|
||||
this.title = scanMediaData.title || null
|
||||
this.subtitle = scanMediaData.subtitle || null
|
||||
this.narrators = this.parseNarratorsTag(scanMediaData.narrators)
|
||||
this.publishedYear = scanMediaData.publishedYear || null
|
||||
this.description = scanMediaData.description || null
|
||||
this.isbn = scanMediaData.isbn || null
|
||||
this.asin = scanMediaData.asin || null
|
||||
this.language = scanMediaData.language || null
|
||||
this.genres = []
|
||||
this.explicit = !!scanMediaData.explicit
|
||||
|
||||
if (scanMediaData.author) {
|
||||
this.authors = this.parseAuthorsTag(scanMediaData.author)
|
||||
}
|
||||
if (scanMediaData.series) {
|
||||
      this.series = this.parseSeriesTag(scanMediaData.series, scanMediaData.sequence)
    }
  }

  setDataFromAudioMetaTags(audioFileMetaTags, overrideExistingDetails = false) {
    const MetadataMapArray = [
      {
        tag: 'tagComposer',
        key: 'narrators'
      },
      {
        tag: 'tagDescription',
        altTag: 'tagComment',
        key: 'description'
      },
      {
        tag: 'tagPublisher',
        key: 'publisher'
      },
      {
        tag: 'tagDate',
        key: 'publishedYear'
      },
      {
        tag: 'tagSubtitle',
        key: 'subtitle'
      },
      {
        tag: 'tagAlbum',
        altTag: 'tagTitle',
        key: 'title',
      },
      {
        tag: 'tagArtist',
        altTag: 'tagAlbumArtist',
        key: 'authors'
      },
      {
        tag: 'tagGenre',
        key: 'genres'
      },
      {
        tag: 'tagSeries',
        key: 'series'
      },
      {
        tag: 'tagIsbn',
        key: 'isbn'
      },
      {
        tag: 'tagLanguage',
        key: 'language'
      },
      {
        tag: 'tagASIN',
        key: 'asin'
      }
    ]

    const updatePayload = {}

    // Metadata is only mapped to the book if it is empty
    MetadataMapArray.forEach((mapping) => {
      let value = audioFileMetaTags[mapping.tag]
      // let tagToUse = mapping.tag
      if (!value && mapping.altTag) {
        value = audioFileMetaTags[mapping.altTag]
        // tagToUse = mapping.altTag
      }

      if (value && typeof value === 'string') {
        value = value.trim() // Trim whitespace

        if (mapping.key === 'narrators' && (!this.narrators.length || overrideExistingDetails)) {
          updatePayload.narrators = this.parseNarratorsTag(value)
        } else if (mapping.key === 'authors' && (!this.authors.length || overrideExistingDetails)) {
          updatePayload.authors = this.parseAuthorsTag(value)
        } else if (mapping.key === 'genres' && (!this.genres.length || overrideExistingDetails)) {
          updatePayload.genres = this.parseGenresTag(value)
        } else if (mapping.key === 'series' && (!this.series.length || overrideExistingDetails)) {
          const sequenceTag = audioFileMetaTags.tagSeriesPart || null
          updatePayload.series = this.parseSeriesTag(value, sequenceTag)
        } else if (!this[mapping.key] || overrideExistingDetails) {
          updatePayload[mapping.key] = value
          // Logger.debug(`[Book] Mapping metadata to key ${tagToUse} => ${mapping.key}: ${updatePayload[mapping.key]}`)
        }
      }
    })

    if (Object.keys(updatePayload).length) {
      return this.update(updatePayload)
    }
    return false
  }

  // Returns array of names in First Last format
  parseNarratorsTag(narratorsTag) {
    const parsed = parseNameString.parse(narratorsTag)
    return parsed ? parsed.names : []
  }

  // Return array of authors minified with placeholder id
  parseAuthorsTag(authorsTag) {
    const parsed = parseNameString.parse(authorsTag)
    if (!parsed) return []
    return (parsed.names || []).map((au) => {
      const findAuthor = this.authors.find(_au => _au.name == au)

      return {
        id: findAuthor?.id || `new-${Math.floor(Math.random() * 1000000)}`,
        name: au
      }
    })
  }

  parseGenresTag(genreTag) {
    if (!genreTag || !genreTag.length) return []
    const separators = ['/', '//', ';']
    for (let i = 0; i < separators.length; i++) {
      if (genreTag.includes(separators[i])) {
        return genreTag.split(separators[i]).map(genre => genre.trim()).filter(g => !!g)
      }
    }
    return [genreTag]
  }

  // Return array with series with placeholder id
  parseSeriesTag(seriesTag, sequenceTag) {
    if (!seriesTag) return []
    return [{
      id: `new-${Math.floor(Math.random() * 1000000)}`,
      name: seriesTag,
      sequence: sequenceTag || ''
    }]
  }

  searchSeries(query) {
    return this.series.filter(se => cleanStringForSearch(se.name).includes(query))
  }
  searchAuthors(query) {
    return this.authors.filter(au => cleanStringForSearch(au.name).includes(query))
  }
  searchNarrators(query) {
    return this.narrators.filter(n => cleanStringForSearch(n).includes(query))
  }
  searchQuery(query) { // Returns key if match is found
    const keysToCheck = ['title', 'asin', 'isbn', 'subtitle']
    for (const key of keysToCheck) {
      if (this[key] && cleanStringForSearch(String(this[key])).includes(query)) {
        return {
          matchKey: key,
          matchText: this[key]
        }
      }
    }
    return null
  }
}
module.exports = BookMetadata
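A note on the genre splitting used by parseGenresTag above: the separators are tried in order and the tag is split on the first one found, so a tag written with '//' is actually split on '/' and the resulting empty segment is dropped by the filter. A minimal standalone sketch of the same rule (the sample inputs are illustrative, not from the source):

    // Sketch of the parseGenresTag splitting rule, assuming plain string tags
    function splitGenres(genreTag) {
      if (!genreTag || !genreTag.length) return []
      const separators = ['/', '//', ';']
      for (const sep of separators) {
        if (genreTag.includes(sep)) {
          return genreTag.split(sep).map(g => g.trim()).filter(g => !!g)
        }
      }
      return [genreTag]
    }
    splitGenres('Fantasy; Sci-Fi') // => ['Fantasy', 'Sci-Fi']
    splitGenres('Epic//Fantasy')   // => ['Epic', 'Fantasy'] (split on '/', empty piece filtered out)
    splitGenres('History')         // => ['History']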
@ -1,5 +1,5 @@
const Logger = require('../../Logger')
const { areEquivalent, copyValue, cleanStringForSearch, getTitleIgnorePrefix, getTitlePrefixAtEnd } = require('../../utils/index')
const { areEquivalent, copyValue, getTitleIgnorePrefix, getTitlePrefixAtEnd } = require('../../utils/index')

class MusicMetadata {
  constructor(metadata) {

@ -133,19 +133,6 @@ class MusicMetadata {
    return getTitlePrefixAtEnd(this.title)
  }

  searchQuery(query) { // Returns key if match is found
    const keysToCheck = ['title', 'album']
    for (const key of keysToCheck) {
      if (this[key] && cleanStringForSearch(String(this[key])).includes(query)) {
        return {
          matchKey: key,
          matchText: this[key]
        }
      }
    }
    return null
  }

  setData(mediaMetadata = {}) {
    this.title = mediaMetadata.title || null
    this.artist = mediaMetadata.artist || null
@ -1,5 +1,5 @@
const Logger = require('../../Logger')
const { areEquivalent, copyValue, cleanStringForSearch, getTitleIgnorePrefix, getTitlePrefixAtEnd } = require('../../utils/index')
const { areEquivalent, copyValue, getTitleIgnorePrefix, getTitlePrefixAtEnd } = require('../../utils/index')

class PodcastMetadata {
  constructor(metadata) {

@ -91,19 +91,6 @@ class PodcastMetadata {
    return getTitlePrefixAtEnd(this.title)
  }

  searchQuery(query) { // Returns key if match is found
    const keysToCheck = ['title', 'author', 'itunesId', 'itunesArtistId']
    for (const key of keysToCheck) {
      if (this[key] && cleanStringForSearch(String(this[key])).includes(query)) {
        return {
          matchKey: key,
          matchText: this[key]
        }
      }
    }
    return null
  }

  setData(mediaMetadata = {}) {
    this.title = mediaMetadata.title || null
    this.author = mediaMetadata.author || null

@ -136,74 +123,5 @@ class PodcastMetadata {
    }
    return hasUpdates
  }

  setDataFromAudioMetaTags(audioFileMetaTags, overrideExistingDetails = false) {
    const MetadataMapArray = [
      {
        tag: 'tagAlbum',
        altTag: 'tagSeries',
        key: 'title'
      },
      {
        tag: 'tagArtist',
        key: 'author'
      },
      {
        tag: 'tagGenre',
        key: 'genres'
      },
      {
        tag: 'tagLanguage',
        key: 'language'
      },
      {
        tag: 'tagItunesId',
        key: 'itunesId'
      },
      {
        tag: 'tagPodcastType',
        key: 'type',
      }
    ]

    const updatePayload = {}

    MetadataMapArray.forEach((mapping) => {
      let value = audioFileMetaTags[mapping.tag]
      let tagToUse = mapping.tag
      if (!value && mapping.altTag) {
        value = audioFileMetaTags[mapping.altTag]
        tagToUse = mapping.altTag
      }

      if (value && typeof value === 'string') {
        value = value.trim() // Trim whitespace

        if (mapping.key === 'genres' && (!this.genres.length || overrideExistingDetails)) {
          updatePayload.genres = this.parseGenresTag(value)
          Logger.debug(`[Podcast] Mapping metadata to key ${tagToUse} => ${mapping.key}: ${updatePayload.genres.join(', ')}`)
        } else if (!this[mapping.key] || overrideExistingDetails) {
          updatePayload[mapping.key] = value
          Logger.debug(`[Podcast] Mapping metadata to key ${tagToUse} => ${mapping.key}: ${updatePayload[mapping.key]}`)
        }
      }
    })

    if (Object.keys(updatePayload).length) {
      return this.update(updatePayload)
    }
    return false
  }

  parseGenresTag(genreTag) {
    if (!genreTag || !genreTag.length) return []
    const separators = ['/', '//', ';']
    for (let i = 0; i < separators.length; i++) {
      if (genreTag.includes(separators[i])) {
        return genreTag.split(separators[i]).map(genre => genre.trim()).filter(g => !!g)
      }
    }
    return [genreTag]
  }
}
module.exports = PodcastMetadata
@ -55,19 +55,6 @@ class VideoMetadata {
    return getTitlePrefixAtEnd(this.title)
  }

  searchQuery(query) { // Returns key if match is found
    var keysToCheck = ['title']
    for (var key of keysToCheck) {
      if (this[key] && String(this[key]).toLowerCase().includes(query)) {
        return {
          matchKey: key,
          matchText: this[key]
        }
      }
    }
    return null
  }

  setData(mediaMetadata = {}) {
    this.title = mediaMetadata.title || null
    this.description = mediaMetadata.description || null
@ -1,6 +1,14 @@
const Logger = require('../../Logger')
const { areEquivalent, copyValue, isNullOrNaN } = require('../../utils')

/**
 * @typedef EreaderDeviceObject
 * @property {string} name
 * @property {string} email
 * @property {string} availabilityOption
 * @property {string[]} users
 */

// REF: https://nodemailer.com/smtp/
class EmailSettings {
  constructor(settings = null) {

@ -13,7 +21,7 @@ class EmailSettings {
    this.testAddress = null
    this.fromAddress = null

    // Array of { name:String, email:String }
    /** @type {EreaderDeviceObject[]} */
    this.ereaderDevices = []

    if (settings) {

@ -57,6 +65,26 @@ class EmailSettings {

    if (payload.ereaderDevices !== undefined && !Array.isArray(payload.ereaderDevices)) payload.ereaderDevices = undefined

    if (payload.ereaderDevices?.length) {
      // Validate ereader devices
      payload.ereaderDevices = payload.ereaderDevices.map((device) => {
        if (!device.name || !device.email) {
          Logger.error(`[EmailSettings] Update ereader device is invalid`, device)
          return null
        }
        if (!device.availabilityOption || !['adminOrUp', 'userOrUp', 'guestOrUp', 'specificUsers'].includes(device.availabilityOption)) {
          device.availabilityOption = 'adminOrUp'
        }
        if (device.availabilityOption === 'specificUsers' && !device.users?.length) {
          device.availabilityOption = 'adminOrUp'
        }
        if (device.availabilityOption !== 'specificUsers' && device.users?.length) {
          device.users = []
        }
        return device
      }).filter(d => d)
    }

    let hasUpdates = false

    const json = this.toJSON()

@ -88,15 +116,40 @@ class EmailSettings {
    return payload
  }

  getEReaderDevices(user) {
    // Only accessible to admin or up
    if (!user.isAdminOrUp) {
      return []
  /**
   *
   * @param {EreaderDeviceObject} device
   * @param {import('../user/User')} user
   * @returns {boolean}
   */
  checkUserCanAccessDevice(device, user) {
    let deviceAvailability = device.availabilityOption || 'adminOrUp'
    if (deviceAvailability === 'adminOrUp' && user.isAdminOrUp) return true
    if (deviceAvailability === 'userOrUp' && (user.isAdminOrUp || user.isUser)) return true
    if (deviceAvailability === 'guestOrUp') return true
    if (deviceAvailability === 'specificUsers') {
      let deviceUsers = device.users || []
      return deviceUsers.includes(user.id)
    }

    return this.ereaderDevices.map(d => ({ ...d }))
    return false
  }

  /**
   * Get ereader devices accessible to user
   *
   * @param {import('../user/User')} user
   * @returns {EreaderDeviceObject[]}
   */
  getEReaderDevices(user) {
    return this.ereaderDevices.filter((device) => this.checkUserCanAccessDevice(device, user))
  }

  /**
   * Get ereader device by name
   *
   * @param {string} deviceName
   * @returns {EreaderDeviceObject}
   */
  getEReaderDevice(deviceName) {
    return this.ereaderDevices.find(d => d.name === deviceName)
  }
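For context on the availability options above, a short sketch of how checkUserCanAccessDevice resolves access (the device objects and user below are hypothetical, and emailSettings stands in for an EmailSettings instance):

    // Hypothetical devices and a guest user, walking the availability rules
    const devices = [
      { name: 'Kindle A', email: 'a@example.com', availabilityOption: 'adminOrUp', users: [] },
      { name: 'Kobo B', email: 'b@example.com', availabilityOption: 'specificUsers', users: ['user-1'] }
    ]
    const guest = { id: 'user-1', isAdminOrUp: false, isUser: false }
    // 'adminOrUp' fails for a guest; 'specificUsers' matches by user id,
    // so getEReaderDevices(guest) would return only 'Kobo B'
    devices.filter((d) => emailSettings.checkUserCanAccessDevice(d, guest))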
@ -9,6 +9,7 @@ class LibrarySettings {
    this.autoScanCronExpression = null
    this.audiobooksOnly = false
    this.hideSingleBookSeries = false // Do not show series that only have 1 book
    this.metadataPrecedence = ['folderStructure', 'audioMetatags', 'nfoFile', 'txtFiles', 'opfFile', 'absMetadata']

    if (settings) {
      this.construct(settings)

@ -23,6 +24,12 @@ class LibrarySettings {
    this.autoScanCronExpression = settings.autoScanCronExpression || null
    this.audiobooksOnly = !!settings.audiobooksOnly
    this.hideSingleBookSeries = !!settings.hideSingleBookSeries
    if (settings.metadataPrecedence) {
      this.metadataPrecedence = [...settings.metadataPrecedence]
    } else {
      // Added in v2.4.5
      this.metadataPrecedence = ['folderStructure', 'audioMetatags', 'nfoFile', 'txtFiles', 'opfFile', 'absMetadata']
    }
  }

  toJSON() {

@ -33,14 +40,20 @@ class LibrarySettings {
      skipMatchingMediaWithIsbn: this.skipMatchingMediaWithIsbn,
      autoScanCronExpression: this.autoScanCronExpression,
      audiobooksOnly: this.audiobooksOnly,
      hideSingleBookSeries: this.hideSingleBookSeries
      hideSingleBookSeries: this.hideSingleBookSeries,
      metadataPrecedence: [...this.metadataPrecedence]
    }
  }

  update(payload) {
    let hasUpdates = false
    for (const key in payload) {
      if (this[key] !== payload[key]) {
      if (key === 'metadataPrecedence') {
        if (payload[key] && Array.isArray(payload[key]) && payload[key].join() !== this[key].join()) {
          this[key] = payload[key]
          hasUpdates = true
        }
      } else if (this[key] !== payload[key]) {
        this[key] = payload[key]
        hasUpdates = true
      }
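The metadataPrecedence comparison above relies on join() being order-sensitive, which is intentional: precedence is an ordered list, so reordering the same entries counts as an update. A quick illustration using values from the default list:

    const a = ['folderStructure', 'audioMetatags']
    const b = ['audioMetatags', 'folderStructure']
    a.join() === b.join() // false -- a reorder alone triggers hasUpdates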
@ -1,3 +1,4 @@
const packageJson = require('../../../package.json')
const { BookshelfView } = require('../../utils/constants')
const Logger = require('../../Logger')

@ -10,11 +11,8 @@ class ServerSettings {
    this.scannerParseSubtitle = false
    this.scannerFindCovers = false
    this.scannerCoverProvider = 'google'
    this.scannerPreferAudioMetadata = false
    this.scannerPreferOpfMetadata = false
    this.scannerPreferMatchedMetadata = false
    this.scannerDisableWatcher = false
    this.scannerPreferOverdriveMediaMarker = false

    // Metadata - choose to store inside users library item folder
    this.storeCoverWithItem = false

@ -53,7 +51,27 @@ class ServerSettings {

    this.logLevel = Logger.logLevel

    this.version = null
    this.version = packageJson.version
    this.buildNumber = packageJson.buildNumber

    // Auth settings
    // Active auth methods
    this.authActiveAuthMethods = ['local']

    // openid settings
    this.authOpenIDIssuerURL = null
    this.authOpenIDAuthorizationURL = null
    this.authOpenIDTokenURL = null
    this.authOpenIDUserInfoURL = null
    this.authOpenIDJwksURL = null
    this.authOpenIDLogoutURL = null
    this.authOpenIDClientID = null
    this.authOpenIDClientSecret = null
    this.authOpenIDButtonText = 'Login with OpenId'
    this.authOpenIDAutoLaunch = false
    this.authOpenIDAutoRegister = false
    this.authOpenIDMatchExistingBy = null
    this.authOpenIDMobileRedirectURIs = ['audiobookshelf://oauth']

    if (settings) {
      this.construct(settings)

@ -65,11 +83,8 @@ class ServerSettings {
    this.scannerFindCovers = !!settings.scannerFindCovers
    this.scannerCoverProvider = settings.scannerCoverProvider || 'google'
    this.scannerParseSubtitle = settings.scannerParseSubtitle
    this.scannerPreferAudioMetadata = !!settings.scannerPreferAudioMetadata
    this.scannerPreferOpfMetadata = !!settings.scannerPreferOpfMetadata
    this.scannerPreferMatchedMetadata = !!settings.scannerPreferMatchedMetadata
    this.scannerDisableWatcher = !!settings.scannerDisableWatcher
    this.scannerPreferOverdriveMediaMarker = !!settings.scannerPreferOverdriveMediaMarker

    this.storeCoverWithItem = !!settings.storeCoverWithItem
    this.storeMetadataWithItem = !!settings.storeMetadataWithItem

@ -96,6 +111,38 @@ class ServerSettings {
    this.language = settings.language || 'en-us'
    this.logLevel = settings.logLevel || Logger.logLevel
    this.version = settings.version || null
    this.buildNumber = settings.buildNumber || 0 // Added v2.4.5

    this.authActiveAuthMethods = settings.authActiveAuthMethods || ['local']

    this.authOpenIDIssuerURL = settings.authOpenIDIssuerURL || null
    this.authOpenIDAuthorizationURL = settings.authOpenIDAuthorizationURL || null
    this.authOpenIDTokenURL = settings.authOpenIDTokenURL || null
    this.authOpenIDUserInfoURL = settings.authOpenIDUserInfoURL || null
    this.authOpenIDJwksURL = settings.authOpenIDJwksURL || null
    this.authOpenIDLogoutURL = settings.authOpenIDLogoutURL || null
    this.authOpenIDClientID = settings.authOpenIDClientID || null
    this.authOpenIDClientSecret = settings.authOpenIDClientSecret || null
    this.authOpenIDButtonText = settings.authOpenIDButtonText || 'Login with OpenId'
    this.authOpenIDAutoLaunch = !!settings.authOpenIDAutoLaunch
    this.authOpenIDAutoRegister = !!settings.authOpenIDAutoRegister
    this.authOpenIDMatchExistingBy = settings.authOpenIDMatchExistingBy || null
    this.authOpenIDMobileRedirectURIs = settings.authOpenIDMobileRedirectURIs || ['audiobookshelf://oauth']

    if (!Array.isArray(this.authActiveAuthMethods)) {
      this.authActiveAuthMethods = ['local']
    }

    // remove uninitialized methods
    // OpenID
    if (this.authActiveAuthMethods.includes('openid') && !this.isOpenIDAuthSettingsValid) {
      this.authActiveAuthMethods.splice(this.authActiveAuthMethods.indexOf('openid', 0), 1)
    }

    // fallback to local
    if (!Array.isArray(this.authActiveAuthMethods) || this.authActiveAuthMethods.length == 0) {
      this.authActiveAuthMethods = ['local']
    }

    // Migrations
    if (settings.storeCoverWithBook != undefined) { // storeCoverWithBook was renamed to storeCoverWithItem in 2.0.0

@ -112,9 +159,9 @@ class ServerSettings {
      this.metadataFileFormat = 'abs'
    }

    // Validation
    if (!['abs', 'json'].includes(this.metadataFileFormat)) {
      Logger.error(`[ServerSettings] construct: Invalid metadataFileFormat ${this.metadataFileFormat}`)
    // As of v2.4.5 only json is supported
    if (this.metadataFileFormat !== 'json') {
      Logger.warn(`[ServerSettings] Invalid metadataFileFormat ${this.metadataFileFormat} (as of v2.4.5 only json is supported)`)
      this.metadataFileFormat = 'json'
    }

@ -130,11 +177,8 @@ class ServerSettings {
      scannerFindCovers: this.scannerFindCovers,
      scannerCoverProvider: this.scannerCoverProvider,
      scannerParseSubtitle: this.scannerParseSubtitle,
      scannerPreferAudioMetadata: this.scannerPreferAudioMetadata,
      scannerPreferOpfMetadata: this.scannerPreferOpfMetadata,
      scannerPreferMatchedMetadata: this.scannerPreferMatchedMetadata,
      scannerDisableWatcher: this.scannerDisableWatcher,
      scannerPreferOverdriveMediaMarker: this.scannerPreferOverdriveMediaMarker,
      storeCoverWithItem: this.storeCoverWithItem,
      storeMetadataWithItem: this.storeMetadataWithItem,
      metadataFileFormat: this.metadataFileFormat,

@ -155,23 +199,100 @@ class ServerSettings {
      timeFormat: this.timeFormat,
      language: this.language,
      logLevel: this.logLevel,
      version: this.version
      version: this.version,
      buildNumber: this.buildNumber,
      authActiveAuthMethods: this.authActiveAuthMethods,
      authOpenIDIssuerURL: this.authOpenIDIssuerURL,
      authOpenIDAuthorizationURL: this.authOpenIDAuthorizationURL,
      authOpenIDTokenURL: this.authOpenIDTokenURL,
      authOpenIDUserInfoURL: this.authOpenIDUserInfoURL,
      authOpenIDJwksURL: this.authOpenIDJwksURL,
      authOpenIDLogoutURL: this.authOpenIDLogoutURL,
      authOpenIDClientID: this.authOpenIDClientID, // Do not return to client
      authOpenIDClientSecret: this.authOpenIDClientSecret, // Do not return to client
      authOpenIDButtonText: this.authOpenIDButtonText,
      authOpenIDAutoLaunch: this.authOpenIDAutoLaunch,
      authOpenIDAutoRegister: this.authOpenIDAutoRegister,
      authOpenIDMatchExistingBy: this.authOpenIDMatchExistingBy,
      authOpenIDMobileRedirectURIs: this.authOpenIDMobileRedirectURIs // Do not return to client
    }
  }

  toJSONForBrowser() {
    const json = this.toJSON()
    delete json.tokenSecret
    delete json.authOpenIDClientID
    delete json.authOpenIDClientSecret
    delete json.authOpenIDMobileRedirectURIs
    return json
  }

  get supportedAuthMethods() {
    return ['local', 'openid']
  }

  /**
   * Auth settings required for openid to be valid
   */
  get isOpenIDAuthSettingsValid() {
    return this.authOpenIDIssuerURL &&
      this.authOpenIDAuthorizationURL &&
      this.authOpenIDTokenURL &&
      this.authOpenIDUserInfoURL &&
      this.authOpenIDJwksURL &&
      this.authOpenIDClientID &&
      this.authOpenIDClientSecret
  }

  get authenticationSettings() {
    return {
      authActiveAuthMethods: this.authActiveAuthMethods,
      authOpenIDIssuerURL: this.authOpenIDIssuerURL,
      authOpenIDAuthorizationURL: this.authOpenIDAuthorizationURL,
      authOpenIDTokenURL: this.authOpenIDTokenURL,
      authOpenIDUserInfoURL: this.authOpenIDUserInfoURL,
      authOpenIDJwksURL: this.authOpenIDJwksURL,
      authOpenIDLogoutURL: this.authOpenIDLogoutURL,
      authOpenIDClientID: this.authOpenIDClientID, // Do not return to client
      authOpenIDClientSecret: this.authOpenIDClientSecret, // Do not return to client
      authOpenIDButtonText: this.authOpenIDButtonText,
      authOpenIDAutoLaunch: this.authOpenIDAutoLaunch,
      authOpenIDAutoRegister: this.authOpenIDAutoRegister,
      authOpenIDMatchExistingBy: this.authOpenIDMatchExistingBy,
      authOpenIDMobileRedirectURIs: this.authOpenIDMobileRedirectURIs // Do not return to client
    }
  }

  get authFormData() {
    const clientFormData = {}
    if (this.authActiveAuthMethods.includes('openid')) {
      clientFormData.authOpenIDButtonText = this.authOpenIDButtonText
      clientFormData.authOpenIDAutoLaunch = this.authOpenIDAutoLaunch
    }
    return clientFormData
  }

  /**
   * Update server settings
   *
   * @param {Object} payload
   * @returns {boolean} true if updates were made
   */
  update(payload) {
    var hasUpdates = false
    let hasUpdates = false
    for (const key in payload) {
      if (key === 'sortingPrefixes' && payload[key] && payload[key].length) {
        var prefixesCleaned = payload[key].filter(prefix => !!prefix).map(prefix => prefix.toLowerCase())
        if (prefixesCleaned.join(',') !== this[key].join(',')) {
          this[key] = [...prefixesCleaned]
      if (key === 'sortingPrefixes') {
        // Sorting prefixes are updated with the /api/sorting-prefixes endpoint
        continue
      } else if (key === 'authActiveAuthMethods') {
        if (!payload[key]?.length) {
          Logger.error(`[ServerSettings] Invalid authActiveAuthMethods`, payload[key])
          continue
        }
        this.authActiveAuthMethods.sort()
        payload[key].sort()
        if (payload[key].join() !== this.authActiveAuthMethods.join()) {
          this.authActiveAuthMethods = payload[key]
          hasUpdates = true
        }
      } else if (this[key] !== payload[key]) {
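A note on the secret handling above: toJSONForBrowser() is simply toJSON() minus the sensitive keys, so any new secret added to toJSON() must also be added to the delete list. A minimal sketch, assuming a default-constructed ServerSettings instance:

    const settings = new ServerSettings() // defaults, no persisted settings
    const browserJson = settings.toJSONForBrowser()
    'authOpenIDClientSecret' in browserJson // false -- stripped for the client
    'authOpenIDButtonText' in browserJson   // true  -- safe to expose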
@ -7,6 +7,7 @@ class User {
    this.id = null
    this.oldUserId = null // TODO: Temp for keeping old access tokens
    this.username = null
    this.email = null
    this.pash = null
    this.type = null
    this.token = null

@ -23,6 +24,8 @@ class User {
    this.librariesAccessible = [] // Library IDs (Empty if ALL libraries)
    this.itemTagsSelected = [] // Empty if ALL item tags accessible

    this.authOpenIDSub = null

    if (user) {
      this.construct(user)
    }

@ -34,6 +37,9 @@ class User {
  get isAdmin() {
    return this.type === 'admin'
  }
  get isUser() {
    return this.type === 'user'
  }
  get isGuest() {
    return this.type === 'guest'
  }

@ -62,7 +68,7 @@ class User {
  getDefaultUserPermissions() {
    return {
      download: true,
      update: true,
      update: this.type === 'root' || this.type === 'admin',
      delete: this.type === 'root',
      upload: this.type === 'root' || this.type === 'admin',
      accessAllLibraries: true,

@ -76,6 +82,7 @@ class User {
      id: this.id,
      oldUserId: this.oldUserId,
      username: this.username,
      email: this.email,
      pash: this.pash,
      type: this.type,
      token: this.token,

@ -88,7 +95,8 @@ class User {
      createdAt: this.createdAt,
      permissions: this.permissions,
      librariesAccessible: [...this.librariesAccessible],
      itemTagsSelected: [...this.itemTagsSelected]
      itemTagsSelected: [...this.itemTagsSelected],
      authOpenIDSub: this.authOpenIDSub
    }
  }

@ -97,6 +105,7 @@ class User {
      id: this.id,
      oldUserId: this.oldUserId,
      username: this.username,
      email: this.email,
      type: this.type,
      token: (this.type === 'root' && hideRootToken) ? '' : this.token,
      mediaProgress: this.mediaProgress ? this.mediaProgress.map(li => li.toJSON()) : [],

@ -140,6 +149,7 @@ class User {
    this.id = user.id
    this.oldUserId = user.oldUserId
    this.username = user.username
    this.email = user.email || null
    this.pash = user.pash
    this.type = user.type
    this.token = user.token

@ -179,12 +189,14 @@ class User {

    this.librariesAccessible = [...(user.librariesAccessible || [])]
    this.itemTagsSelected = [...(user.itemTagsSelected || [])]

    this.authOpenIDSub = user.authOpenIDSub || null
  }

  update(payload) {
    var hasUpdates = false
    // Update the following keys:
    const keysToCheck = ['pash', 'type', 'username', 'isActive']
    const keysToCheck = ['pash', 'type', 'username', 'email', 'isActive']
    keysToCheck.forEach((key) => {
      if (payload[key] !== undefined) {
        if (key === 'isActive' || payload[key]) { // pash, type, username must evaluate to true (cannot be null or empty)
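The getDefaultUserPermissions change above ties the default permissions to the account type. Restated as a standalone sketch of the same logic (the helper name is hypothetical):

    const defaultsFor = (type) => ({
      download: true,
      update: type === 'root' || type === 'admin',
      delete: type === 'root',
      upload: type === 'root' || type === 'admin',
      accessAllLibraries: true
    })
    defaultsFor('admin').update // true
    defaultsFor('admin').delete // false -- only root deletes by default
    defaultsFor('user').upload  // false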
@ -18,6 +18,27 @@ class Audible {
    }
  }

  /**
   * Audible will sometimes send sequences with "Book 1" or "2, Dramatized Adaptation"
   * @see https://github.com/advplyr/audiobookshelf/issues/2380
   * @see https://github.com/advplyr/audiobookshelf/issues/1339
   *
   * @param {string} seriesName
   * @param {string} sequence
   * @returns {string}
   */
  cleanSeriesSequence(seriesName, sequence) {
    if (!sequence) return ''
    let updatedSequence = sequence.replace(/Book /, '').trim()
    if (updatedSequence.includes(' ')) {
      updatedSequence = updatedSequence.split(' ').shift().replace(/,$/, '')
    }
    if (sequence !== updatedSequence) {
      Logger.debug(`[Audible] Series "${seriesName}" sequence was cleaned from "${sequence}" to "${updatedSequence}"`)
    }
    return updatedSequence
  }

  cleanResult(item) {
    const { title, subtitle, asin, authors, narrators, publisherName, summary, releaseDate, image, genres, seriesPrimary, seriesSecondary, language, runtimeLengthMin, formatType } = item

@ -25,13 +46,13 @@ class Audible {
    if (seriesPrimary) {
      series.push({
        series: seriesPrimary.name,
        sequence: (seriesPrimary.position || '').replace(/Book /, '') // Can be badly formatted see #1339
        sequence: this.cleanSeriesSequence(seriesPrimary.name, seriesPrimary.position || '')
      })
    }
    if (seriesSecondary) {
      series.push({
        series: seriesSecondary.name,
        sequence: (seriesSecondary.position || '').replace(/Book /, '')
        sequence: this.cleanSeriesSequence(seriesSecondary.name, seriesSecondary.position || '')
      })
    }

@ -64,7 +85,7 @@ class Audible {
  }

  asinSearch(asin, region) {
    asin = encodeURIComponent(asin);
    asin = encodeURIComponent(asin)
    var regionQuery = region ? `?region=${region}` : ''
    var url = `https://api.audnex.us/books/${asin}${regionQuery}`
    Logger.debug(`[Audible] ASIN url: ${url}`)
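Tracing cleanSeriesSequence above through the two linked issues makes the rule concrete: strip a leading "Book ", then if a space remains keep only the first token and drop a trailing comma:

    cleanSeriesSequence('Series X', 'Book 1')                   // => '1'
    cleanSeriesSequence('Series X', '2, Dramatized Adaptation') // => '2' ('2,' keeps first token, comma stripped)
    cleanSeriesSequence('Series X', '')                         // => ''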
@ -1,4 +1,3 @@
const sequelize = require('sequelize')
const express = require('express')
const Path = require('path')

@ -36,10 +35,12 @@ const Series = require('../objects/entities/Series')

class ApiRouter {
  constructor(Server) {
    /** @type {import('../Auth')} */
    this.auth = Server.auth
    this.playbackSessionManager = Server.playbackSessionManager
    this.abMergeManager = Server.abMergeManager
    this.backupManager = Server.backupManager
    /** @type {import('../Watcher')} */
    this.watcher = Server.watcher
    this.podcastManager = Server.podcastManager
    this.audioMetadataManager = Server.audioMetadataManager

@ -47,7 +48,7 @@ class ApiRouter {
    this.cronManager = Server.cronManager
    this.notificationManager = Server.notificationManager
    this.emailManager = Server.emailManager
    this.taskManager = Server.taskManager
    this.apiCacheManager = Server.apiCacheManager

    this.router = express()
    this.router.disable('x-powered-by')

@ -58,6 +59,7 @@ class ApiRouter {
    //
    // Library Routes
    //
    this.router.get(/^\/libraries/, this.apiCacheManager.middleware)
    this.router.post('/libraries', LibraryController.create.bind(this))
    this.router.get('/libraries', LibraryController.findAll.bind(this))
    this.router.get('/libraries/:id', LibraryController.middleware.bind(this), LibraryController.findOne.bind(this))

@ -84,6 +86,7 @@ class ApiRouter {
    this.router.get('/libraries/:id/recent-episodes', LibraryController.middleware.bind(this), LibraryController.getRecentEpisodes.bind(this))
    this.router.get('/libraries/:id/opml', LibraryController.middleware.bind(this), LibraryController.getOPMLFile.bind(this))
    this.router.post('/libraries/order', LibraryController.reorder.bind(this))
    this.router.post('/libraries/:id/remove-metadata', LibraryController.middleware.bind(this), LibraryController.removeAllMetadataFiles.bind(this))

    //
    // Item Routes

@ -177,6 +180,7 @@ class ApiRouter {
    this.router.get('/me/items-in-progress', MeController.getAllLibraryItemsInProgress.bind(this))
    this.router.get('/me/series/:id/remove-from-continue-listening', MeController.removeSeriesFromContinueListening.bind(this))
    this.router.get('/me/series/:id/readd-to-continue-listening', MeController.readdSeriesFromContinueListening.bind(this))
    this.router.get('/me/stats/year/:year', MeController.getStatsForYear.bind(this))

    //
    // Backup Routes

@ -202,6 +206,8 @@ class ApiRouter {
    this.router.delete('/authors/:id', AuthorController.middleware.bind(this), AuthorController.delete.bind(this))
    this.router.post('/authors/:id/match', AuthorController.middleware.bind(this), AuthorController.match.bind(this))
    this.router.get('/authors/:id/image', AuthorController.middleware.bind(this), AuthorController.getImage.bind(this))
    this.router.post('/authors/:id/image', AuthorController.middleware.bind(this), AuthorController.uploadImage.bind(this))
    this.router.delete('/authors/:id/image', AuthorController.middleware.bind(this), AuthorController.deleteImage.bind(this))

    //
    // Series Routes

@ -215,6 +221,7 @@ class ApiRouter {
    this.router.get('/sessions', SessionController.getAllWithUserData.bind(this))
    this.router.delete('/sessions/:id', SessionController.middleware.bind(this), SessionController.delete.bind(this))
    this.router.get('/sessions/open', SessionController.getOpenSessions.bind(this))
    this.router.post('/sessions/batch/delete', SessionController.batchDelete.bind(this))
    this.router.post('/session/local', SessionController.syncLocal.bind(this))
    this.router.post('/session/local-all', SessionController.syncLocalSessions.bind(this))
    // TODO: Update these endpoints because they are only for open playback sessions

@ -253,11 +260,11 @@ class ApiRouter {
    //
    // Email Routes (Admin and up)
    //
    this.router.get('/emails/settings', EmailController.middleware.bind(this), EmailController.getSettings.bind(this))
    this.router.patch('/emails/settings', EmailController.middleware.bind(this), EmailController.updateSettings.bind(this))
    this.router.post('/emails/test', EmailController.middleware.bind(this), EmailController.sendTest.bind(this))
    this.router.post('/emails/ereader-devices', EmailController.middleware.bind(this), EmailController.updateEReaderDevices.bind(this))
    this.router.post('/emails/send-ebook-to-device', EmailController.middleware.bind(this), EmailController.sendEBookToDevice.bind(this))
    this.router.get('/emails/settings', EmailController.adminMiddleware.bind(this), EmailController.getSettings.bind(this))
    this.router.patch('/emails/settings', EmailController.adminMiddleware.bind(this), EmailController.updateSettings.bind(this))
    this.router.post('/emails/test', EmailController.adminMiddleware.bind(this), EmailController.sendTest.bind(this))
    this.router.post('/emails/ereader-devices', EmailController.adminMiddleware.bind(this), EmailController.updateEReaderDevices.bind(this))
    this.router.post('/emails/send-ebook-to-device', EmailController.sendEBookToDevice.bind(this))

    //
    // Search Routes

@ -307,35 +314,10 @@ class ApiRouter {
    this.router.post('/genres/rename', MiscController.renameGenre.bind(this))
    this.router.delete('/genres/:genre', MiscController.deleteGenre.bind(this))
    this.router.post('/validate-cron', MiscController.validateCronExpression.bind(this))
  }

  async getDirectories(dir, relpath, excludedDirs, level = 0) {
    try {
      const paths = await fs.readdir(dir)

      let dirs = await Promise.all(paths.map(async dirname => {
        const fullPath = Path.join(dir, dirname)
        const path = Path.join(relpath, dirname)

        const isDir = (await fs.lstat(fullPath)).isDirectory()
        if (isDir && !excludedDirs.includes(path) && dirname !== 'node_modules') {
          return {
            path,
            dirname,
            fullPath,
            level,
            dirs: level < 4 ? (await this.getDirectories(fullPath, path, excludedDirs, level + 1)) : []
          }
        } else {
          return false
        }
      }))
      dirs = dirs.filter(d => d)
      return dirs
    } catch (error) {
      Logger.error('Failed to readdir', dir, error)
      return []
    }
    this.router.get('/auth-settings', MiscController.getAuthSettings.bind(this))
    this.router.patch('/auth-settings', MiscController.updateAuthSettings.bind(this))
    this.router.post('/watcher/update', MiscController.updateWatchedPath.bind(this))
    this.router.get('/stats/year/:year', MiscController.getAdminStatsForYear.bind(this))
  }

  //

@ -482,18 +464,6 @@ class ApiRouter {
    return userSessions.sort((a, b) => b.updatedAt - a.updatedAt)
  }

  async getAllSessionsWithUserData() {
    const sessions = await Database.getPlaybackSessions()
    sessions.sort((a, b) => b.updatedAt - a.updatedAt)
    const minifiedUserObjects = await Database.userModel.getMinifiedUserObjects()
    return sessions.map(se => {
      return {
        ...se,
        user: minifiedUserObjects.find(u => u.id === se.userId) || null
      }
    })
  }

  async getUserListeningStatsHelpers(userId) {
    const today = date.format(new Date(), 'YYYY-MM-DD')
@ -27,28 +27,60 @@ class HlsRouter {
    return Number(num_part)
  }

  async streamFileRequest(req, res) {
    var streamId = req.params.stream
    var fullFilePath = Path.join(this.playbackSessionManager.StreamsPath, streamId, req.params.file)
  /**
   * Ensure filepath is inside streamDir
   * Used to prevent arbitrary file reads
   * @see https://nodejs.org/api/path.html#pathrelativefrom-to
   *
   * @param {string} streamDir
   * @param {string} filepath
   * @returns {boolean}
   */
  validateStreamFilePath(streamDir, filepath) {
    const relative = Path.relative(streamDir, filepath)
    return relative && !relative.startsWith('..') && !Path.isAbsolute(relative)
  }

    var exists = await fs.pathExists(fullFilePath)
    if (!exists) {
  /**
   * GET /hls/:stream/:file
   * File must have extname .ts or .m3u8
   *
   * @param {express.Request} req
   * @param {express.Response} res
   */
  async streamFileRequest(req, res) {
    const streamId = req.params.stream
    // Ensure stream is open
    const stream = this.playbackSessionManager.getStream(streamId)
    if (!stream) {
      Logger.error(`[HlsRouter] Stream "${streamId}" does not exist`)
      return res.sendStatus(404)
    }

    // Ensure stream filepath is valid
    const streamDir = Path.join(this.playbackSessionManager.StreamsPath, streamId)
    const fullFilePath = Path.join(streamDir, req.params.file)
    if (!this.validateStreamFilePath(streamDir, fullFilePath)) {
      Logger.error(`[HlsRouter] Invalid file parameter "${req.params.file}"`)
      return res.sendStatus(400)
    }

    const fileExt = Path.extname(req.params.file)
    if (fileExt !== '.ts' && fileExt !== '.m3u8') {
      Logger.error(`[HlsRouter] Invalid file parameter "${req.params.file}" extname. Must be .ts or .m3u8`)
      return res.sendStatus(400)
    }

    if (!(await fs.pathExists(fullFilePath))) {
      Logger.warn('File path does not exist', fullFilePath)

    var fileExt = Path.extname(req.params.file)
    if (fileExt === '.ts' || fileExt === '.m4s') {
      var segNum = this.parseSegmentFilename(req.params.file)
      var stream = this.playbackSessionManager.getStream(streamId)
      if (!stream) {
        Logger.error(`[HlsRouter] Stream ${streamId} does not exist`)
        return res.sendStatus(500)
      }
    if (fileExt === '.ts') {
      const segNum = this.parseSegmentFilename(req.params.file)

      if (stream.isResetting) {
        Logger.info(`[HlsRouter] Stream ${streamId} is currently resetting`)
        return res.sendStatus(404)
      } else {
        var startTimeForReset = await stream.checkSegmentNumberRequest(segNum)
        const startTimeForReset = await stream.checkSegmentNumberRequest(segNum)
        if (startTimeForReset) {
          // HLS.js will restart the stream at the new time
          Logger.info(`[HlsRouter] Resetting Stream - notify client @${startTimeForReset}s`)

@ -56,13 +88,12 @@ class HlsRouter {
            startTime: startTimeForReset,
            streamId: stream.id
          })
          return res.sendStatus(500)
        }
      }
    }
      return res.sendStatus(404)
    }

    // Logger.info('Sending file', fullFilePath)
    res.sendFile(fullFilePath)
  }
}
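The Path.relative containment check above is the standard Node pattern for rejecting traversal in a user-supplied filename. A standalone sketch with illustrative paths:

    const Path = require('path')
    const isInside = (dir, filepath) => {
      const relative = Path.relative(dir, filepath)
      return !!relative && !relative.startsWith('..') && !Path.isAbsolute(relative)
    }
    isInside('/streams/abc', '/streams/abc/segment0.ts')      // true
    isInside('/streams/abc', '/streams/abc/../../etc/passwd') // false -- relative path starts with '..'
    isInside('/streams/abc', '/streams/abc')                  // false -- empty relative path is rejected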
@ -1,8 +0,0 @@
const express = require('express')
const libraries = require('./libraries')

const router = express.Router()

router.use('/libraries', libraries)

module.exports = router

@ -1,7 +0,0 @@
const express = require('express')

const router = express.Router()

// TODO: Add library routes

module.exports = router

84 server/scanner/AbsMetadataFileScanner.js Normal file

@ -0,0 +1,84 @@
const Path = require('path')
const fsExtra = require('../libs/fsExtra')
const { readTextFile } = require('../utils/fileUtils')
const { LogLevel } = require('../utils/constants')
const abmetadataGenerator = require('../utils/generators/abmetadataGenerator')

class AbsMetadataFileScanner {
  constructor() { }

  /**
   * Check for metadata.json file and set book metadata
   *
   * @param {import('./LibraryScan')} libraryScan
   * @param {import('./LibraryItemScanData')} libraryItemData
   * @param {Object} bookMetadata
   * @param {string} [existingLibraryItemId]
   */
  async scanBookMetadataFile(libraryScan, libraryItemData, bookMetadata, existingLibraryItemId = null) {
    const metadataLibraryFile = libraryItemData.metadataJsonLibraryFile
    let metadataText = metadataLibraryFile ? await readTextFile(metadataLibraryFile.metadata.path) : null
    let metadataFilePath = metadataLibraryFile?.metadata.path

    // When metadata file is not stored with library item then check in the /metadata/items folder for it
    if (!metadataText && existingLibraryItemId) {
      let metadataPath = Path.join(global.MetadataPath, 'items', existingLibraryItemId)

      metadataFilePath = Path.join(metadataPath, 'metadata.json')
      if (await fsExtra.pathExists(metadataFilePath)) {
        metadataText = await readTextFile(metadataFilePath)
      }
    }

    if (metadataText) {
      libraryScan.addLog(LogLevel.INFO, `Found metadata file "${metadataFilePath}"`)
      const abMetadata = abmetadataGenerator.parseJson(metadataText) || {}
      for (const key in abMetadata) {
        // TODO: When to override with null or empty arrays?
        if (abMetadata[key] === undefined || abMetadata[key] === null) continue
        if (key === 'authors' && !abMetadata.authors?.length) continue
        if (key === 'genres' && !abMetadata.genres?.length) continue
        if (key === 'tags' && !abMetadata.tags?.length) continue
        if (key === 'chapters' && !abMetadata.chapters?.length) continue

        bookMetadata[key] = abMetadata[key]
      }
    }
  }

  /**
   * Check for metadata.json file and set podcast metadata
   *
   * @param {import('./LibraryScan')} libraryScan
   * @param {import('./LibraryItemScanData')} libraryItemData
   * @param {Object} podcastMetadata
   * @param {string} [existingLibraryItemId]
   */
  async scanPodcastMetadataFile(libraryScan, libraryItemData, podcastMetadata, existingLibraryItemId = null) {
    const metadataLibraryFile = libraryItemData.metadataJsonLibraryFile
    let metadataText = metadataLibraryFile ? await readTextFile(metadataLibraryFile.metadata.path) : null
    let metadataFilePath = metadataLibraryFile?.metadata.path

    // When metadata file is not stored with library item then check in the /metadata/items folder for it
    if (!metadataText && existingLibraryItemId) {
      let metadataPath = Path.join(global.MetadataPath, 'items', existingLibraryItemId)

      metadataFilePath = Path.join(metadataPath, 'metadata.json')
      if (await fsExtra.pathExists(metadataFilePath)) {
        metadataText = await readTextFile(metadataFilePath)
      }
    }

    if (metadataText) {
      libraryScan.addLog(LogLevel.INFO, `Found metadata file "${metadataFilePath}"`)
      const abMetadata = abmetadataGenerator.parseJson(metadataText) || {}
      for (const key in abMetadata) {
        if (abMetadata[key] === undefined || abMetadata[key] === null) continue
        if (key === 'tags' && !abMetadata.tags?.length) continue

        podcastMetadata[key] = abMetadata[key]
      }
    }
  }
}
module.exports = new AbsMetadataFileScanner()
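Both scanner methods above share the same lookup order for metadata.json: prefer the copy stored with the library item, then fall back to the server's /metadata/items/<id>/ folder. A condensed sketch of that shared lookup, using the same readTextFile and fsExtra imports as the file above (the helper name is hypothetical; the duplication could arguably be factored this way):

    async function findMetadataText(libraryItemData, existingLibraryItemId) {
      const libFile = libraryItemData.metadataJsonLibraryFile
      if (libFile) return readTextFile(libFile.metadata.path) // stored with the item
      if (!existingLibraryItemId) return null
      const fallbackPath = Path.join(global.MetadataPath, 'items', existingLibraryItemId, 'metadata.json')
      return (await fsExtra.pathExists(fallbackPath)) ? readTextFile(fallbackPath) : null
    }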
@ -1,6 +1,9 @@
const Path = require('path')
const Logger = require('../Logger')
const prober = require('../utils/prober')
const { LogLevel } = require('../utils/constants')
const { parseOverdriveMediaMarkersAsChapters } = require('../utils/parsers/parseOverdriveMediaMarkers')
const parseNameString = require('../utils/parsers/parseNameString')
const LibraryItem = require('../models/LibraryItem')
const AudioFile = require('../objects/files/AudioFile')

@ -205,5 +208,339 @@ class AudioFileScanner {
    Logger.debug(`[AudioFileScanner] Running ffprobe for audio file at "${audioFile.metadata.path}"`)
    return prober.rawProbe(audioFile.metadata.path)
  }

  /**
   * Set book metadata & chapters from audio file meta tags
   *
   * @param {string} bookTitle
   * @param {import('../models/Book').AudioFileObject} audioFile
   * @param {Object} bookMetadata
   * @param {import('./LibraryScan')} libraryScan
   */
  setBookMetadataFromAudioMetaTags(bookTitle, audioFiles, bookMetadata, libraryScan) {
    const MetadataMapArray = [
      {
        tag: 'tagComposer',
        key: 'narrators'
      },
      {
        tag: 'tagDescription',
        altTag: 'tagComment',
        key: 'description'
      },
      {
        tag: 'tagPublisher',
        key: 'publisher'
      },
      {
        tag: 'tagDate',
        key: 'publishedYear'
      },
      {
        tag: 'tagSubtitle',
        key: 'subtitle'
      },
      {
        tag: 'tagAlbum',
        altTag: 'tagTitle',
        key: 'title',
      },
      {
        tag: 'tagArtist',
        altTag: 'tagAlbumArtist',
        key: 'authors'
      },
      {
        tag: 'tagGenre',
        key: 'genres'
      },
      {
        tag: 'tagSeries',
        key: 'series'
      },
      {
        tag: 'tagIsbn',
        key: 'isbn'
      },
      {
        tag: 'tagLanguage',
        key: 'language'
      },
      {
        tag: 'tagASIN',
        key: 'asin'
      }
    ]

    const firstScannedFile = audioFiles[0]
    const audioFileMetaTags = firstScannedFile.metaTags
    MetadataMapArray.forEach((mapping) => {
      let value = audioFileMetaTags[mapping.tag]
      if (!value && mapping.altTag) {
        value = audioFileMetaTags[mapping.altTag]
      }

      if (value && typeof value === 'string') {
        value = value.trim() // Trim whitespace

        if (mapping.key === 'narrators') {
          bookMetadata.narrators = parseNameString.parse(value)?.names || []
        } else if (mapping.key === 'authors') {
          bookMetadata.authors = parseNameString.parse(value)?.names || []
        } else if (mapping.key === 'genres') {
          bookMetadata.genres = this.parseGenresString(value)
        } else if (mapping.key === 'series') {
          bookMetadata.series = [
            {
              name: value,
              sequence: audioFileMetaTags.tagSeriesPart || null
            }
          ]
        } else {
          bookMetadata[mapping.key] = value
        }
      }
    })

    // Set chapters
    const chapters = this.getBookChaptersFromAudioFiles(bookTitle, audioFiles, libraryScan)
    if (chapters.length) {
      bookMetadata.chapters = chapters
    }
  }

  /**
   * Set podcast metadata from first audio file
   *
   * @param {import('../models/Book').AudioFileObject} audioFile
   * @param {Object} podcastMetadata
   * @param {import('./LibraryScan')} libraryScan
   */
  setPodcastMetadataFromAudioMetaTags(audioFile, podcastMetadata, libraryScan) {
    const audioFileMetaTags = audioFile.metaTags

    const MetadataMapArray = [
      {
        tag: 'tagAlbum',
        altTag: 'tagSeries',
        key: 'title'
      },
      {
        tag: 'tagArtist',
        key: 'author'
      },
      {
        tag: 'tagGenre',
        key: 'genres'
      },
      {
        tag: 'tagLanguage',
        key: 'language'
      },
      {
        tag: 'tagItunesId',
        key: 'itunesId'
      },
      {
        tag: 'tagPodcastType',
        key: 'podcastType',
      }
    ]

    MetadataMapArray.forEach((mapping) => {
      let value = audioFileMetaTags[mapping.tag]
      let tagToUse = mapping.tag
      if (!value && mapping.altTag) {
        value = audioFileMetaTags[mapping.altTag]
        tagToUse = mapping.altTag
      }

      if (value && typeof value === 'string') {
        value = value.trim() // Trim whitespace

        if (mapping.key === 'genres') {
          podcastMetadata.genres = this.parseGenresString(value)
          libraryScan.addLog(LogLevel.DEBUG, `Mapping metadata to key ${tagToUse} => ${mapping.key}: ${podcastMetadata.genres.join(', ')}`)
        } else {
          podcastMetadata[mapping.key] = value
          libraryScan.addLog(LogLevel.DEBUG, `Mapping metadata to key ${tagToUse} => ${mapping.key}: ${podcastMetadata[mapping.key]}`)
        }
      }
    })
  }

  /**
   *
   * @param {import('../models/PodcastEpisode')} podcastEpisode Not the model when creating new podcast
   * @param {import('./ScanLogger')} scanLogger
   */
  setPodcastEpisodeMetadataFromAudioMetaTags(podcastEpisode, scanLogger) {
    const MetadataMapArray = [
      {
        tag: 'tagComment',
        altTag: 'tagSubtitle',
        key: 'description'
      },
      {
        tag: 'tagSubtitle',
        key: 'subtitle'
      },
      {
        tag: 'tagDate',
        key: 'pubDate'
      },
      {
        tag: 'tagDisc',
        key: 'season',
      },
      {
        tag: 'tagTrack',
        altTag: 'tagSeriesPart',
        key: 'episode'
      },
      {
        tag: 'tagTitle',
        key: 'title'
      },
      {
        tag: 'tagEpisodeType',
        key: 'episodeType'
      }
    ]

    const audioFileMetaTags = podcastEpisode.audioFile.metaTags
    MetadataMapArray.forEach((mapping) => {
      let value = audioFileMetaTags[mapping.tag]
      let tagToUse = mapping.tag
      if (!value && mapping.altTag) {
        tagToUse = mapping.altTag
        value = audioFileMetaTags[mapping.altTag]
      }

      if (value && typeof value === 'string') {
        value = value.trim() // Trim whitespace

        if (mapping.key === 'pubDate') {
          const pubJsDate = new Date(value)
          if (pubJsDate && !isNaN(pubJsDate)) {
            podcastEpisode.publishedAt = pubJsDate.valueOf()
            podcastEpisode.pubDate = value
            scanLogger.addLog(LogLevel.DEBUG, `Mapping metadata to key ${tagToUse} => ${mapping.key}: ${podcastEpisode[mapping.key]}`)
          } else {
            scanLogger.addLog(LogLevel.WARN, `Mapping pubDate with tag ${tagToUse} has invalid date "${value}"`)
          }
        } else if (mapping.key === 'episodeType') {
          if (['full', 'trailer', 'bonus'].includes(value)) {
            podcastEpisode.episodeType = value
            scanLogger.addLog(LogLevel.DEBUG, `Mapping metadata to key ${tagToUse} => ${mapping.key}: ${podcastEpisode[mapping.key]}`)
          } else {
            scanLogger.addLog(LogLevel.WARN, `Mapping episodeType with invalid value "${value}". Must be one of [full, trailer, bonus].`)
          }
        } else {
          podcastEpisode[mapping.key] = value
          scanLogger.addLog(LogLevel.DEBUG, `Mapping metadata to key ${tagToUse} => ${mapping.key}: ${podcastEpisode[mapping.key]}`)
        }
      }
    })
  }

  /**
   * @param {string} bookTitle
   * @param {AudioFile[]} audioFiles
   * @param {import('./LibraryScan')} libraryScan
   * @returns {import('../models/Book').ChapterObject[]}
   */
  getBookChaptersFromAudioFiles(bookTitle, audioFiles, libraryScan) {
    // If overdrive media markers are present then use those instead
    const overdriveChapters = parseOverdriveMediaMarkersAsChapters(audioFiles)
    if (overdriveChapters?.length) {
      libraryScan.addLog(LogLevel.DEBUG, 'Overdrive Media Markers and preference found! Using these for chapter definitions')

      return overdriveChapters
    }

    let chapters = []

    // If first audio file has embedded chapters then use embedded chapters
    if (audioFiles[0].chapters?.length) {
      // If all files chapters are the same, then only make chapters for the first file
      if (
        audioFiles.length === 1 ||
        audioFiles.length > 1 &&
        audioFiles[0].chapters.length === audioFiles[1].chapters?.length &&
        audioFiles[0].chapters.every((c, i) => c.title === audioFiles[1].chapters[i].title && c.start === audioFiles[1].chapters[i].start)
      ) {
        libraryScan.addLog(LogLevel.DEBUG, `setChapters: Using embedded chapters in first audio file ${audioFiles[0].metadata?.path}`)
        chapters = audioFiles[0].chapters.map((c) => ({ ...c }))
      } else {
        libraryScan.addLog(LogLevel.DEBUG, `setChapters: Using embedded chapters from all audio files ${audioFiles[0].metadata?.path}`)
        let currChapterId = 0
        let currStartTime = 0

        audioFiles.forEach((file) => {
          if (file.duration) {
            const afChapters = file.chapters?.map((c) => ({
              ...c,
              id: c.id + currChapterId,
              start: c.start + currStartTime,
              end: c.end + currStartTime,
            })) ?? []
            chapters = chapters.concat(afChapters)

            currChapterId += file.chapters?.length ?? 0
            currStartTime += file.duration
          }
        })
        return chapters
      }
    } else if (audioFiles.length > 1) {
      // In some cases the ID3 title tag for each file is the chapter title, the criteria to determine if this will be used
      // 1. Every audio file has an ID3 title tag set
      // 2. None of the title tags are the same as the book title
      // 3. Every ID3 title tag is unique
      const metaTagTitlesFound = [...new Set(audioFiles.map(af => af.metaTags?.tagTitle).filter(tagTitle => !!tagTitle && tagTitle !== bookTitle))]
      const useMetaTagAsTitle = metaTagTitlesFound.length === audioFiles.length

      // Build chapters from audio files
      let currChapterId = 0
      let currStartTime = 0
      audioFiles.forEach((file) => {
        if (file.duration) {
          let title = file.metadata.filename ? Path.basename(file.metadata.filename, Path.extname(file.metadata.filename)) : `Chapter ${currChapterId}`
          if (useMetaTagAsTitle) {
            title = file.metaTags.tagTitle
          }

          chapters.push({
            id: currChapterId++,
            start: currStartTime,
            end: currStartTime + file.duration,
            title
          })
          currStartTime += file.duration
        }
      })
    }
    return chapters
  }

  /**
   * Parse a genre string into multiple genres
   * @example "Fantasy;Sci-Fi;History" => ["Fantasy", "Sci-Fi", "History"]
   *
   * @param {string} genreTag
   * @returns {string[]}
   */
  parseGenresString(genreTag) {
    if (!genreTag?.length) return []
    const separators = ['/', '//', ';']
    for (let i = 0; i < separators.length; i++) {
      if (genreTag.includes(separators[i])) {
        return genreTag.split(separators[i]).map(genre => genre.trim()).filter(g => !!g)
      }
    }
    return [genreTag]
  }
}
module.exports = new AudioFileScanner()
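The multi-file chapter merge in getBookChaptersFromAudioFiles above offsets each file's embedded chapters by the running start time of the files before it. Worked through a tiny illustrative input:

    const files = [
      { duration: 100, chapters: [{ id: 0, start: 0, end: 100, title: 'One' }] },
      { duration: 80, chapters: [{ id: 0, start: 0, end: 80, title: 'Two' }] }
    ]
    let currChapterId = 0
    let currStartTime = 0
    let chapters = []
    files.forEach((file) => {
      // Shift ids and timestamps by the totals accumulated so far
      const afChapters = file.chapters.map((c) => ({
        ...c,
        id: c.id + currChapterId,
        start: c.start + currStartTime,
        end: c.end + currStartTime
      }))
      chapters = chapters.concat(afChapters)
      currChapterId += file.chapters.length
      currStartTime += file.duration
    })
    // chapters => [{ id: 0, start: 0, end: 100, ... }, { id: 1, start: 100, end: 180, ... }]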
@ -3,10 +3,8 @@ const Path = require('path')
|
|||
const sequelize = require('sequelize')
|
||||
const { LogLevel } = require('../utils/constants')
|
||||
const { getTitleIgnorePrefix, areEquivalent } = require('../utils/index')
|
||||
const { parseOpfMetadataXML } = require('../utils/parsers/parseOpfMetadata')
|
||||
const { parseOverdriveMediaMarkersAsChapters } = require('../utils/parsers/parseOverdriveMediaMarkers')
|
||||
const abmetadataGenerator = require('../utils/generators/abmetadataGenerator')
|
||||
const parseNameString = require('../utils/parsers/parseNameString')
|
||||
const parseEbookMetadata = require('../utils/parsers/parseEbookMetadata')
|
||||
const globals = require('../utils/globals')
|
||||
const AudioFileScanner = require('./AudioFileScanner')
|
||||
const Database = require('../Database')
|
||||
|
|
@ -16,9 +14,13 @@ const CoverManager = require('../managers/CoverManager')
|
|||
const LibraryFile = require('../objects/files/LibraryFile')
|
||||
const SocketAuthority = require('../SocketAuthority')
|
||||
const fsExtra = require("../libs/fsExtra")
|
||||
const LibraryScan = require("./LibraryScan")
|
||||
const BookFinder = require('../finders/BookFinder')
|
||||
|
||||
const LibraryScan = require("./LibraryScan")
|
||||
const OpfFileScanner = require('./OpfFileScanner')
|
||||
const NfoFileScanner = require('./NfoFileScanner')
|
||||
const AbsMetadataFileScanner = require('./AbsMetadataFileScanner')
|
||||
|
||||
/**
|
||||
* Metadata for books pulled from files
|
||||
* @typedef BookMetadataObject
|
||||
|
|
@ -50,7 +52,7 @@ class BookScanner {
|
|||
* @param {import('./LibraryItemScanData')} libraryItemData
|
||||
* @param {import('../models/Library').LibrarySettingsObject} librarySettings
|
||||
* @param {LibraryScan} libraryScan
|
||||
* @returns {Promise<import('../models/LibraryItem')>}
|
||||
* @returns {Promise<{libraryItem:import('../models/LibraryItem'), wasUpdated:boolean}>}
|
||||
*/
|
||||
async rescanExistingBookLibraryItem(existingLibraryItem, libraryItemData, librarySettings, libraryScan) {
|
||||
/** @type {import('../models/Book')} */
|
||||
|
|
@ -75,8 +77,8 @@ class BookScanner {
|
|||
]
|
||||
})
|
||||
|
||||
let hasMediaChanges = libraryItemData.hasAudioFileChanges
|
||||
if (libraryItemData.hasAudioFileChanges || libraryItemData.audioLibraryFiles.length !== media.audioFiles.length) {
|
||||
let hasMediaChanges = libraryItemData.hasAudioFileChanges || libraryItemData.audioLibraryFiles.length !== media.audioFiles.length
|
||||
if (hasMediaChanges) {
|
||||
// Filter out audio files that were removed
|
||||
media.audioFiles = media.audioFiles.filter(af => !libraryItemData.checkAudioFileRemoved(af))
|
||||
|
||||
|
|
@@ -168,7 +170,9 @@ class BookScanner {
      hasMediaChanges = true
    }

    const bookMetadata = await this.getBookMetadataFromScanData(media.audioFiles, libraryItemData, libraryScan, existingLibraryItem.id)
    const ebookFileScanData = await parseEbookMetadata.parse(media.ebookFile)

    const bookMetadata = await this.getBookMetadataFromScanData(media.audioFiles, ebookFileScanData, libraryItemData, libraryScan, librarySettings, existingLibraryItem.id)
    let authorsUpdated = false
    const bookAuthorsRemoved = []
    let seriesUpdated = false
@@ -215,7 +219,8 @@ class BookScanner {
      } else if (key === 'series') {
        // Check for series added
        for (const seriesObj of bookMetadata.series) {
          if (!media.series.some(se => se.name === seriesObj.name)) {
          const existingBookSeries = media.series.find(se => se.name === seriesObj.name)
          if (!existingBookSeries) {
            const existingSeries = Database.libraryFilterData[libraryItemData.libraryId].series.find(se => se.name === seriesObj.name)
            if (existingSeries) {
              await Database.bookSeriesModel.create({
@@ -236,6 +241,11 @@ class BookScanner {
              libraryScan.addLog(LogLevel.DEBUG, `Updating book "${bookMetadata.title}" added new series "${seriesObj.name}"${seriesObj.sequence ? ` with sequence "${seriesObj.sequence}"` : ''}`)
              seriesUpdated = true
            }
          } else if (seriesObj.sequence && existingBookSeries.bookSeries.sequence !== seriesObj.sequence) {
            libraryScan.addLog(LogLevel.DEBUG, `Updating book "${bookMetadata.title}" series "${seriesObj.name}" sequence "${existingBookSeries.bookSeries.sequence || ''}" => "${seriesObj.sequence}"`)
            seriesUpdated = true
            existingBookSeries.bookSeries.sequence = seriesObj.sequence
            await existingBookSeries.bookSeries.save()
          }
        }
        // Check for series removed
@@ -309,24 +319,34 @@ class BookScanner {
      })
    }

    // If no cover then extract cover from audio file if available OR search for cover if enabled in server settings
    // If no cover then extract cover from audio file OR from ebook
    const libraryItemDir = existingLibraryItem.isFile ? null : existingLibraryItem.path
    if (!media.coverPath) {
      const libraryItemDir = existingLibraryItem.isFile ? null : existingLibraryItem.path
      const extractedCoverPath = await CoverManager.saveEmbeddedCoverArt(media.audioFiles, existingLibraryItem.id, libraryItemDir)
      let extractedCoverPath = await CoverManager.saveEmbeddedCoverArt(media.audioFiles, existingLibraryItem.id, libraryItemDir)
      if (extractedCoverPath) {
        libraryScan.addLog(LogLevel.DEBUG, `Updating book "${bookMetadata.title}" extracted embedded cover art from audio file to path "${extractedCoverPath}"`)
        media.coverPath = extractedCoverPath
        hasMediaChanges = true
      } else if (Database.serverSettings.scannerFindCovers) {
        const authorName = media.authors.map(au => au.name).filter(au => au).join(', ')
        const coverPath = await this.searchForCover(existingLibraryItem.id, libraryItemDir, media.title, authorName, libraryScan)
        if (coverPath) {
          media.coverPath = coverPath
      } else if (ebookFileScanData?.ebookCoverPath) {
        extractedCoverPath = await CoverManager.saveEbookCoverArt(ebookFileScanData, existingLibraryItem.id, libraryItemDir)
        if (extractedCoverPath) {
          libraryScan.addLog(LogLevel.DEBUG, `Updating book "${bookMetadata.title}" extracted embedded cover art from ebook file to path "${extractedCoverPath}"`)
          media.coverPath = extractedCoverPath
          hasMediaChanges = true
        }
      }
    }

    // If no cover then search for cover if enabled in server settings
    if (!media.coverPath && Database.serverSettings.scannerFindCovers) {
      const authorName = media.authors.map(au => au.name).filter(au => au).join(', ')
      const coverPath = await this.searchForCover(existingLibraryItem.id, libraryItemDir, media.title, authorName, libraryScan)
      if (coverPath) {
        media.coverPath = coverPath
        hasMediaChanges = true
      }
    }

    existingLibraryItem.media = media

    let libraryItemUpdated = false
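A condensed sketch (not part of the diff) of the cover-resolution order the rewritten block implements; logging and the hasMediaChanges bookkeeping are omitted, and itemId/itemDir/authorName abbreviate the identifiers used above:

// 1. embedded art in the audio files, 2. embedded art in the ebook,
// 3. online cover search, only when scannerFindCovers is enabled
media.coverPath =
  await CoverManager.saveEmbeddedCoverArt(media.audioFiles, itemId, itemDir) ||
  (ebookFileScanData?.ebookCoverPath && await CoverManager.saveEbookCoverArt(ebookFileScanData, itemId, itemDir)) ||
  (Database.serverSettings.scannerFindCovers && await this.searchForCover(itemId, itemDir, media.title, authorName, libraryScan)) ||
  null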
@@ -338,6 +358,19 @@ class BookScanner {
      libraryItemUpdated = global.ServerSettings.storeMetadataWithItem && !existingLibraryItem.isFile
    }

    // If book has no audio files and no ebook then it is considered missing
    if (!media.audioFiles.length && !media.ebookFile) {
      if (!existingLibraryItem.isMissing) {
        libraryScan.addLog(LogLevel.INFO, `Book "${bookMetadata.title}" has no audio files and no ebook file. Setting library item as missing`)
        existingLibraryItem.isMissing = true
        libraryItemUpdated = true
      }
    } else if (existingLibraryItem.isMissing) {
      libraryScan.addLog(LogLevel.INFO, `Book "${bookMetadata.title}" was missing but now has media files. Setting library item as NOT missing`)
      existingLibraryItem.isMissing = false
      libraryItemUpdated = true
    }

    // Check/update the isSupplementary flag on libraryFiles for the LibraryItem
    for (const libraryFile of existingLibraryItem.libraryFiles) {
      if (globals.SupportedEbookTypes.includes(libraryFile.metadata.ext.slice(1).toLowerCase())) {
@@ -360,7 +393,10 @@ class BookScanner {
    libraryScan.seriesRemovedFromBooks.push(...bookSeriesRemoved)
    libraryScan.authorsRemovedFromBooks.push(...bookAuthorsRemoved)

    return existingLibraryItem
    return {
      libraryItem: existingLibraryItem,
      wasUpdated: hasMediaChanges || libraryItemUpdated || seriesUpdated || authorsUpdated
    }
  }

  /**
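Callers now destructure the richer return value instead of receiving the bare library item. A minimal sketch (not part of the diff) of the call-site change this implies:

// Before: const libraryItem = await BookScanner.rescanExistingBookLibraryItem(...)
// After:
const { libraryItem, wasUpdated } = await BookScanner.rescanExistingBookLibraryItem(existingLibraryItem, libraryItemData, librarySettings, libraryScan)
if (wasUpdated) {
  // only emit socket updates / bump scan counters when something actually changed
}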
@@ -384,12 +420,14 @@ class BookScanner {
      return null
    }

    let ebookFileScanData = null
    if (ebookLibraryFile) {
      ebookLibraryFile = ebookLibraryFile.toJSON()
      ebookLibraryFile.ebookFormat = ebookLibraryFile.metadata.ext.slice(1).toLowerCase()
      ebookFileScanData = await parseEbookMetadata.parse(ebookLibraryFile)
    }

    const bookMetadata = await this.getBookMetadataFromScanData(scannedAudioFiles, libraryItemData, libraryScan)
    const bookMetadata = await this.getBookMetadataFromScanData(scannedAudioFiles, ebookFileScanData, libraryItemData, libraryScan, librarySettings)
    bookMetadata.explicit = !!bookMetadata.explicit // Ensure boolean
    bookMetadata.abridged = !!bookMetadata.abridged // Ensure boolean
@@ -457,19 +495,28 @@ class BookScanner {
      }
    }

    // If cover was not found in folder then check embedded covers in audio files OR search for cover
    // If cover was not found in folder then check embedded covers in audio files OR ebook file
    const libraryItemDir = libraryItemObj.isFile ? null : libraryItemObj.path
    if (!bookObject.coverPath) {
      const libraryItemDir = libraryItemObj.isFile ? null : libraryItemObj.path
      // Extract and save embedded cover art
      const extractedCoverPath = await CoverManager.saveEmbeddedCoverArt(scannedAudioFiles, libraryItemObj.id, libraryItemDir)
      let extractedCoverPath = await CoverManager.saveEmbeddedCoverArt(scannedAudioFiles, libraryItemObj.id, libraryItemDir)
      if (extractedCoverPath) {
        libraryScan.addLog(LogLevel.DEBUG, `Extracted embedded cover from audio file at "${extractedCoverPath}" for book "${bookObject.title}"`)
        bookObject.coverPath = extractedCoverPath
      } else if (Database.serverSettings.scannerFindCovers) {
        const authorName = bookMetadata.authors.join(', ')
        bookObject.coverPath = await this.searchForCover(libraryItemObj.id, libraryItemDir, bookObject.title, authorName, libraryScan)
      } else if (ebookFileScanData?.ebookCoverPath) {
        extractedCoverPath = await CoverManager.saveEbookCoverArt(ebookFileScanData, libraryItemObj.id, libraryItemDir)
        if (extractedCoverPath) {
          libraryScan.addLog(LogLevel.DEBUG, `Extracted embedded cover from ebook file at "${extractedCoverPath}" for book "${bookObject.title}"`)
          bookObject.coverPath = extractedCoverPath
        }
      }
    }

    // If cover not found then search for cover if enabled in settings
    if (!bookObject.coverPath && Database.serverSettings.scannerFindCovers) {
      const authorName = bookMetadata.authors.join(', ')
      bookObject.coverPath = await this.searchForCover(libraryItemObj.id, libraryItemDir, bookObject.title, authorName, libraryScan)
    }

    libraryItemObj.book = bookObject
    const libraryItem = await Database.libraryItemModel.create(libraryItemObj, {
      include: {
@@ -546,228 +593,45 @@ class BookScanner {
  /**
   *
   * @param {import('../models/Book').AudioFileObject[]} audioFiles
   * @param {import('../utils/parsers/parseEbookMetadata').EBookFileScanData} ebookFileScanData
   * @param {import('./LibraryItemScanData')} libraryItemData
   * @param {LibraryScan} libraryScan
   * @param {import('../models/Library').LibrarySettingsObject} librarySettings
   * @param {string} [existingLibraryItemId]
   * @returns {Promise<BookMetadataObject>}
   */
  async getBookMetadataFromScanData(audioFiles, libraryItemData, libraryScan, existingLibraryItemId = null) {
  async getBookMetadataFromScanData(audioFiles, ebookFileScanData, libraryItemData, libraryScan, librarySettings, existingLibraryItemId = null) {
    // First set book metadata from folder/file names
    const bookMetadata = {
      title: libraryItemData.mediaMetadata.title,
      titleIgnorePrefix: getTitleIgnorePrefix(libraryItemData.mediaMetadata.title),
      subtitle: libraryItemData.mediaMetadata.subtitle || undefined,
      publishedYear: libraryItemData.mediaMetadata.publishedYear || undefined,
      title: libraryItemData.mediaMetadata.title, // required
      titleIgnorePrefix: undefined,
      subtitle: undefined,
      publishedYear: undefined,
      publisher: undefined,
      description: undefined,
      isbn: undefined,
      asin: undefined,
      language: undefined,
      narrators: parseNameString.parse(libraryItemData.mediaMetadata.narrators)?.names || [],
      narrators: [],
      genres: [],
      tags: [],
      authors: parseNameString.parse(libraryItemData.mediaMetadata.author)?.names || [],
      authors: [],
      series: [],
      chapters: [],
      explicit: undefined,
      abridged: undefined,
      coverPath: undefined
    }
    if (libraryItemData.mediaMetadata.series) {
      bookMetadata.series.push({
        name: libraryItemData.mediaMetadata.series,
        sequence: libraryItemData.mediaMetadata.sequence || null
      })
    }

    // Fill in or override book metadata from audio file meta tags
    if (audioFiles.length) {
      const MetadataMapArray = [
        {
          tag: 'tagComposer',
          key: 'narrators'
        },
        {
          tag: 'tagDescription',
          altTag: 'tagComment',
          key: 'description'
        },
        {
          tag: 'tagPublisher',
          key: 'publisher'
        },
        {
          tag: 'tagDate',
          key: 'publishedYear'
        },
        {
          tag: 'tagSubtitle',
          key: 'subtitle'
        },
        {
          tag: 'tagAlbum',
          altTag: 'tagTitle',
          key: 'title',
        },
        {
          tag: 'tagArtist',
          altTag: 'tagAlbumArtist',
          key: 'authors'
        },
        {
          tag: 'tagGenre',
          key: 'genres'
        },
        {
          tag: 'tagSeries',
          key: 'series'
        },
        {
          tag: 'tagIsbn',
          key: 'isbn'
        },
        {
          tag: 'tagLanguage',
          key: 'language'
        },
        {
          tag: 'tagASIN',
          key: 'asin'
        }
      ]
      const overrideExistingDetails = Database.serverSettings.scannerPreferAudioMetadata
      const firstScannedFile = audioFiles[0]
      const audioFileMetaTags = firstScannedFile.metaTags
      MetadataMapArray.forEach((mapping) => {
        let value = audioFileMetaTags[mapping.tag]
        if (!value && mapping.altTag) {
          value = audioFileMetaTags[mapping.altTag]
        }

        if (value && typeof value === 'string') {
          value = value.trim() // Trim whitespace

          if (mapping.key === 'narrators' && (!bookMetadata.narrators.length || overrideExistingDetails)) {
            bookMetadata.narrators = parseNameString.parse(value)?.names || []
          } else if (mapping.key === 'authors' && (!bookMetadata.authors.length || overrideExistingDetails)) {
            bookMetadata.authors = parseNameString.parse(value)?.names || []
          } else if (mapping.key === 'genres' && (!bookMetadata.genres.length || overrideExistingDetails)) {
            bookMetadata.genres = this.parseGenresString(value)
          } else if (mapping.key === 'series' && (!bookMetadata.series.length || overrideExistingDetails)) {
            bookMetadata.series = [
              {
                name: value,
                sequence: audioFileMetaTags.tagSeriesPart || null
              }
            ]
          } else if (!bookMetadata[mapping.key] || overrideExistingDetails) {
            bookMetadata[mapping.key] = value
          }
        }
      })
    }

    // If desc.txt in library item folder then use this for description
    if (libraryItemData.descTxtLibraryFile) {
      const description = await readTextFile(libraryItemData.descTxtLibraryFile.metadata.path)
      if (description.trim()) bookMetadata.description = description.trim()
    }

    // If reader.txt in library item folder then use this for narrator
    if (libraryItemData.readerTxtLibraryFile) {
      let narrator = await readTextFile(libraryItemData.readerTxtLibraryFile.metadata.path)
      narrator = narrator.split(/\r?\n/)[0]?.trim() || '' // Only use first line
      if (narrator) {
        bookMetadata.narrators = parseNameString.parse(narrator)?.names || []
      }
    }

    // If opf file is found look for metadata
    if (libraryItemData.metadataOpfLibraryFile) {
      const xmlText = await readTextFile(libraryItemData.metadataOpfLibraryFile.metadata.path)
      const opfMetadata = xmlText ? await parseOpfMetadataXML(xmlText) : null
      if (opfMetadata) {
        const opfMetadataOverrideDetails = Database.serverSettings.scannerPreferOpfMetadata
        for (const key in opfMetadata) {
          if (key === 'tags') { // Add tags only if tags are empty
            if (opfMetadata.tags.length && (!bookMetadata.tags.length || opfMetadataOverrideDetails)) {
              bookMetadata.tags = opfMetadata.tags
            }
          } else if (key === 'genres') { // Add genres only if genres are empty
            if (opfMetadata.genres.length && (!bookMetadata.genres.length || opfMetadataOverrideDetails)) {
              bookMetadata.genres = opfMetadata.genres
            }
          } else if (key === 'authors') {
            if (opfMetadata.authors?.length && (!bookMetadata.authors.length || opfMetadataOverrideDetails)) {
              bookMetadata.authors = opfMetadata.authors
            }
          } else if (key === 'narrators') {
            if (opfMetadata.narrators?.length && (!bookMetadata.narrators.length || opfMetadataOverrideDetails)) {
              bookMetadata.narrators = opfMetadata.narrators
            }
          } else if (key === 'series') {
            if (opfMetadata.series && (!bookMetadata.series.length || opfMetadataOverrideDetails)) {
              bookMetadata.series = [{
                name: opfMetadata.series,
                sequence: opfMetadata.sequence || null
              }]
            }
          } else if (opfMetadata[key] && (!bookMetadata[key] || opfMetadataOverrideDetails)) {
            bookMetadata[key] = opfMetadata[key]
          }
        }
      }
    }

    // If metadata.json or metadata.abs use this for metadata
    const metadataLibraryFile = libraryItemData.metadataJsonLibraryFile || libraryItemData.metadataAbsLibraryFile
    let metadataText = metadataLibraryFile ? await readTextFile(metadataLibraryFile.metadata.path) : null
    let metadataFilePath = metadataLibraryFile?.metadata.path
    let metadataFileFormat = libraryItemData.metadataJsonLibraryFile ? 'json' : 'abs'

    // When metadata file is not stored with library item then check in the /metadata/items folder for it
    if (!metadataText && existingLibraryItemId) {
      let metadataPath = Path.join(global.MetadataPath, 'items', existingLibraryItemId)

      let altFormat = global.ServerSettings.metadataFileFormat === 'json' ? 'abs' : 'json'
      // First check the metadata format set in server settings, fallback to the alternate
      metadataFilePath = Path.join(metadataPath, `metadata.${global.ServerSettings.metadataFileFormat}`)
      metadataFileFormat = global.ServerSettings.metadataFileFormat
      if (await fsExtra.pathExists(metadataFilePath)) {
        metadataText = await readTextFile(metadataFilePath)
      } else if (await fsExtra.pathExists(Path.join(metadataPath, `metadata.${altFormat}`))) {
        metadataFilePath = Path.join(metadataPath, `metadata.${altFormat}`)
        metadataFileFormat = altFormat
        metadataText = await readTextFile(metadataFilePath)
      }
    }

    if (metadataText) {
      libraryScan.addLog(LogLevel.INFO, `Found metadata file "${metadataFilePath}" - preferring`)
      let abMetadata = null
      if (metadataFileFormat === 'json') {
        abMetadata = abmetadataGenerator.parseJson(metadataText)
    const bookMetadataSourceHandler = new BookScanner.BookMetadataSourceHandler(bookMetadata, audioFiles, ebookFileScanData, libraryItemData, libraryScan, existingLibraryItemId)
    const metadataPrecedence = librarySettings.metadataPrecedence || ['folderStructure', 'audioMetatags', 'nfoFile', 'txtFiles', 'opfFile', 'absMetadata']
    libraryScan.addLog(LogLevel.DEBUG, `"${bookMetadata.title}" Getting metadata with precedence [${metadataPrecedence.join(', ')}]`)
    for (const metadataSource of metadataPrecedence) {
      if (bookMetadataSourceHandler[metadataSource]) {
        await bookMetadataSourceHandler[metadataSource]()
      } else {
        abMetadata = abmetadataGenerator.parse(metadataText, 'book')
        libraryScan.addLog(LogLevel.ERROR, `Invalid metadata source "${metadataSource}"`)
      }

      if (abMetadata) {
        if (abMetadata.tags?.length) {
          bookMetadata.tags = abMetadata.tags
        }
        if (abMetadata.chapters?.length) {
          bookMetadata.chapters = abMetadata.chapters
        }
        for (const key in abMetadata.metadata) {
          if (abMetadata.metadata[key] === undefined) continue
          bookMetadata[key] = abMetadata.metadata[key]
        }
      }
    }

    // Set chapters from audio files if not already set
    if (!bookMetadata.chapters.length) {
      bookMetadata.chapters = this.getChaptersFromAudioFiles(bookMetadata.title, audioFiles, libraryScan)
    }

    // Set cover from library file if one is found otherwise check audiofile
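A minimal sketch (not part of the diff) of the dispatch pattern the new precedence loop uses: each entry in metadataPrecedence names a method on BookMetadataSourceHandler, and each source mutates the shared bookMetadata in turn, so later sources win for any field they provide:

const handler = new BookScanner.BookMetadataSourceHandler(bookMetadata, audioFiles, ebookFileScanData, libraryItemData, libraryScan, existingLibraryItemId)
for (const source of ['folderStructure', 'audioMetatags', 'absMetadata']) { // subset for illustration
  if (typeof handler[source] === 'function') await handler[source]()
}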
@@ -781,102 +645,115 @@ class BookScanner {
    return bookMetadata
  }

  /**
   * Parse a genre string into multiple genres
   * @example "Fantasy;Sci-Fi;History" => ["Fantasy", "Sci-Fi", "History"]
   * @param {string} genreTag
   * @returns {string[]}
   */
  parseGenresString(genreTag) {
    if (!genreTag?.length) return []
    const separators = ['/', '//', ';']
    for (let i = 0; i < separators.length; i++) {
      if (genreTag.includes(separators[i])) {
        return genreTag.split(separators[i]).map(genre => genre.trim()).filter(g => !!g)
      }
    }
    return [genreTag]
  }

  /**
   * @param {string} bookTitle
   * @param {AudioFile[]} audioFiles
   * @param {LibraryScan} libraryScan
   * @returns {import('../models/Book').ChapterObject[]}
   */
  getChaptersFromAudioFiles(bookTitle, audioFiles, libraryScan) {
    if (!audioFiles.length) return []

    // If overdrive media markers are present and preferred, use those instead
    if (Database.serverSettings.scannerPreferOverdriveMediaMarker) {
      const overdriveChapters = parseOverdriveMediaMarkersAsChapters(audioFiles)
      if (overdriveChapters) {
        libraryScan.addLog(LogLevel.DEBUG, 'Overdrive Media Markers and preference found! Using these for chapter definitions')

        return overdriveChapters
      }
  static BookMetadataSourceHandler = class {
    /**
     *
     * @param {Object} bookMetadata
     * @param {import('../models/Book').AudioFileObject[]} audioFiles
     * @param {import('../utils/parsers/parseEbookMetadata').EBookFileScanData} ebookFileScanData
     * @param {import('./LibraryItemScanData')} libraryItemData
     * @param {LibraryScan} libraryScan
     * @param {string} existingLibraryItemId
     */
    constructor(bookMetadata, audioFiles, ebookFileScanData, libraryItemData, libraryScan, existingLibraryItemId) {
      this.bookMetadata = bookMetadata
      this.audioFiles = audioFiles
      this.ebookFileScanData = ebookFileScanData
      this.libraryItemData = libraryItemData
      this.libraryScan = libraryScan
      this.existingLibraryItemId = existingLibraryItemId
    }

    let chapters = []
    /**
     * Metadata parsed from folder names/structure
     */
    folderStructure() {
      this.libraryItemData.setBookMetadataFromFilenames(this.bookMetadata)
    }

    // If first audio file has embedded chapters then use embedded chapters
    if (audioFiles[0].chapters?.length) {
      // If all files chapters are the same, then only make chapters for the first file
      if (
        audioFiles.length === 1 ||
        audioFiles.length > 1 &&
        audioFiles[0].chapters.length === audioFiles[1].chapters?.length &&
        audioFiles[0].chapters.every((c, i) => c.title === audioFiles[1].chapters[i].title)
      ) {
        libraryScan.addLog(LogLevel.DEBUG, `setChapters: Using embedded chapters in first audio file ${audioFiles[0].metadata?.path}`)
        chapters = audioFiles[0].chapters.map((c) => ({ ...c }))
      } else {
        libraryScan.addLog(LogLevel.DEBUG, `setChapters: Using embedded chapters from all audio files ${audioFiles[0].metadata?.path}`)
        let currChapterId = 0
        let currStartTime = 0

        audioFiles.forEach((file) => {
          if (file.duration) {
            const afChapters = file.chapters?.map((c) => ({
              ...c,
              id: c.id + currChapterId,
              start: c.start + currStartTime,
              end: c.end + currStartTime,
            })) ?? []
            chapters = chapters.concat(afChapters)

            currChapterId += file.chapters?.length ?? 0
            currStartTime += file.duration
    /**
     * Metadata from audio file meta tags OR metadata from ebook file
     */
    audioMetatags() {
      if (this.audioFiles.length) {
        // Modifies bookMetadata with metadata mapped from audio file meta tags
        const bookTitle = this.bookMetadata.title || this.libraryItemData.mediaMetadata.title
        AudioFileScanner.setBookMetadataFromAudioMetaTags(bookTitle, this.audioFiles, this.bookMetadata, this.libraryScan)
      } else if (this.ebookFileScanData) {
        const ebookMetdataObject = this.ebookFileScanData.metadata
        for (const key in ebookMetdataObject) {
          if (key === 'tags') {
            if (ebookMetdataObject.tags.length) {
              this.bookMetadata.tags = ebookMetdataObject.tags
            }
          } else if (key === 'genres') {
            if (ebookMetdataObject.genres.length) {
              this.bookMetadata.genres = ebookMetdataObject.genres
            }
          } else if (key === 'authors') {
            if (ebookMetdataObject.authors?.length) {
              this.bookMetadata.authors = ebookMetdataObject.authors
            }
          } else if (key === 'narrators') {
            if (ebookMetdataObject.narrators?.length) {
              this.bookMetadata.narrators = ebookMetdataObject.narrators
            }
          } else if (key === 'series') {
            if (ebookMetdataObject.series?.length) {
              this.bookMetadata.series = ebookMetdataObject.series
            }
          } else if (ebookMetdataObject[key] && key !== 'sequence') {
            this.bookMetadata[key] = ebookMetdataObject[key]
          }
        })
        return chapters
      }
    } else if (audioFiles.length > 1) {
      const preferAudioMetadata = !!Database.serverSettings.scannerPreferAudioMetadata

      // Build chapters from audio files
      let currChapterId = 0
      let currStartTime = 0
      audioFiles.forEach((file) => {
        if (file.duration) {
          let title = file.metadata.filename ? Path.basename(file.metadata.filename, Path.extname(file.metadata.filename)) : `Chapter ${currChapterId}`

          // When prefer audio metadata server setting is set then use ID3 title tag as long as it is not the same as the book title
          if (preferAudioMetadata && file.metaTags?.tagTitle && file.metaTags?.tagTitle !== bookTitle) {
            title = file.metaTags.tagTitle
          }

          chapters.push({
            id: currChapterId++,
            start: currStartTime,
            end: currStartTime + file.duration,
            title
          })
          currStartTime += file.duration
        }
      })
    }
      return null
    }

    /**
     * Metadata from .nfo file
     */
    async nfoFile() {
      if (!this.libraryItemData.metadataNfoLibraryFile) return
      await NfoFileScanner.scanBookNfoFile(this.libraryItemData.metadataNfoLibraryFile, this.bookMetadata)
    }

    /**
     * Description from desc.txt and narrator from reader.txt
     */
    async txtFiles() {
      // If desc.txt in library item folder then use this for description
      if (this.libraryItemData.descTxtLibraryFile) {
        const description = await readTextFile(this.libraryItemData.descTxtLibraryFile.metadata.path)
        if (description.trim()) this.bookMetadata.description = description.trim()
      }

      // If reader.txt in library item folder then use this for narrator
      if (this.libraryItemData.readerTxtLibraryFile) {
        let narrator = await readTextFile(this.libraryItemData.readerTxtLibraryFile.metadata.path)
        narrator = narrator.split(/\r?\n/)[0]?.trim() || '' // Only use first line
        if (narrator) {
          this.bookMetadata.narrators = parseNameString.parse(narrator)?.names || []
        }
      }
    }

    /**
     * Metadata from opf file
     */
    async opfFile() {
      if (!this.libraryItemData.metadataOpfLibraryFile) return
      await OpfFileScanner.scanBookOpfFile(this.libraryItemData.metadataOpfLibraryFile, this.bookMetadata)
    }

    /**
     * Metadata from metadata.json
     */
    async absMetadata() {
      // If metadata.json use this for metadata
      await AbsMetadataFileScanner.scanBookMetadataFile(this.libraryScan, this.libraryItemData, this.bookMetadata, this.existingLibraryItemId)
    }
    return chapters
  }

  /**
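A worked example (not from the diff) of the chapter re-offsetting the removed getChaptersFromAudioFiles logic performed when stitching per-file embedded chapters into one book-level list:

// Two files, 600s and 900s long, each with chapters starting at 0.
// File 2's chapters are shifted by the accumulated duration (600s)
// and their ids by the accumulated chapter count (2):
// file1: [{id:0, start:0}, {id:1, start:300}]
// file2: [{id:0, start:0}, {id:1, start:450}]
// result: [{id:0, start:0}, {id:1, start:300}, {id:2, start:600}, {id:3, start:1050}]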
@@ -896,121 +773,66 @@ class BookScanner {
      await fsExtra.ensureDir(metadataPath)
    }

    const metadataFileFormat = global.ServerSettings.metadataFileFormat
    const metadataFilePath = Path.join(metadataPath, `metadata.${metadataFileFormat}`)
    if (metadataFileFormat === 'json') {
      // Remove metadata.abs if it exists
      if (await fsExtra.pathExists(Path.join(metadataPath, `metadata.abs`))) {
        libraryScan.addLog(LogLevel.DEBUG, `Removing metadata.abs for item "${libraryItem.media.title}"`)
        await fsExtra.remove(Path.join(metadataPath, `metadata.abs`))
        libraryItem.libraryFiles = libraryItem.libraryFiles.filter(lf => lf.metadata.path !== filePathToPOSIX(Path.join(metadataPath, `metadata.abs`)))
      }
    const metadataFilePath = Path.join(metadataPath, `metadata.${global.ServerSettings.metadataFileFormat}`)

      // TODO: Update to not use `metadata` so it fits the updated model
      const jsonObject = {
        tags: libraryItem.media.tags || [],
        chapters: libraryItem.media.chapters?.map(c => ({ ...c })) || [],
        metadata: {
          title: libraryItem.media.title,
          subtitle: libraryItem.media.subtitle,
          authors: libraryItem.media.authors.map(a => a.name),
          narrators: libraryItem.media.narrators,
          series: libraryItem.media.series.map(se => {
            const sequence = se.bookSeries?.sequence || ''
            if (!sequence) return se.name
            return `${se.name} #${sequence}`
          }),
          genres: libraryItem.media.genres || [],
          publishedYear: libraryItem.media.publishedYear,
          publishedDate: libraryItem.media.publishedDate,
          publisher: libraryItem.media.publisher,
          description: libraryItem.media.description,
          isbn: libraryItem.media.isbn,
          asin: libraryItem.media.asin,
          language: libraryItem.media.language,
          explicit: !!libraryItem.media.explicit,
          abridged: !!libraryItem.media.abridged
        }
      }
      return fsExtra.writeFile(metadataFilePath, JSON.stringify(jsonObject, null, 2)).then(async () => {
        // Add metadata.json to libraryFiles array if it is new
        let metadataLibraryFile = libraryItem.libraryFiles.find(lf => lf.metadata.path === filePathToPOSIX(metadataFilePath))
        if (storeMetadataWithItem) {
          if (!metadataLibraryFile) {
            const newLibraryFile = new LibraryFile()
            await newLibraryFile.setDataFromPath(metadataFilePath, `metadata.json`)
            metadataLibraryFile = newLibraryFile.toJSON()
            libraryItem.libraryFiles.push(metadataLibraryFile)
          } else {
            const fileTimestamps = await getFileTimestampsWithIno(metadataFilePath)
            if (fileTimestamps) {
              metadataLibraryFile.metadata.mtimeMs = fileTimestamps.mtimeMs
              metadataLibraryFile.metadata.ctimeMs = fileTimestamps.ctimeMs
              metadataLibraryFile.metadata.size = fileTimestamps.size
              metadataLibraryFile.ino = fileTimestamps.ino
            }
          }
          const libraryItemDirTimestamps = await getFileTimestampsWithIno(libraryItem.path)
          if (libraryItemDirTimestamps) {
            libraryItem.mtime = libraryItemDirTimestamps.mtimeMs
            libraryItem.ctime = libraryItemDirTimestamps.ctimeMs
            let size = 0
            libraryItem.libraryFiles.forEach((lf) => size += (!isNaN(lf.metadata.size) ? Number(lf.metadata.size) : 0))
            libraryItem.size = size
          }
        }

        libraryScan.addLog(LogLevel.DEBUG, `Success saving abmetadata to "${metadataFilePath}"`)

        return metadataLibraryFile
      }).catch((error) => {
        libraryScan.addLog(LogLevel.ERROR, `Failed to save json file at "${metadataFilePath}"`, error)
        return null
      })
    } else {
      // Remove metadata.json if it exists
      if (await fsExtra.pathExists(Path.join(metadataPath, `metadata.json`))) {
        libraryScan.addLog(LogLevel.DEBUG, `Removing metadata.json for item "${libraryItem.media.title}"`)
        await fsExtra.remove(Path.join(metadataPath, `metadata.json`))
        libraryItem.libraryFiles = libraryItem.libraryFiles.filter(lf => lf.metadata.path !== filePathToPOSIX(Path.join(metadataPath, `metadata.json`)))
      }

      return abmetadataGenerator.generateFromNewModel(libraryItem, metadataFilePath).then(async (success) => {
        if (!success) {
          libraryScan.addLog(LogLevel.ERROR, `Failed saving abmetadata to "${metadataFilePath}"`)
          return null
        }
        // Add metadata.abs to libraryFiles array if it is new
        let metadataLibraryFile = libraryItem.libraryFiles.find(lf => lf.metadata.path === filePathToPOSIX(metadataFilePath))
        if (storeMetadataWithItem) {
          if (!metadataLibraryFile) {
            const newLibraryFile = new LibraryFile()
            await newLibraryFile.setDataFromPath(metadataFilePath, `metadata.abs`)
            metadataLibraryFile = newLibraryFile.toJSON()
            libraryItem.libraryFiles.push(metadataLibraryFile)
          } else {
            const fileTimestamps = await getFileTimestampsWithIno(metadataFilePath)
            if (fileTimestamps) {
              metadataLibraryFile.metadata.mtimeMs = fileTimestamps.mtimeMs
              metadataLibraryFile.metadata.ctimeMs = fileTimestamps.ctimeMs
              metadataLibraryFile.metadata.size = fileTimestamps.size
              metadataLibraryFile.ino = fileTimestamps.ino
            }
          }
          const libraryItemDirTimestamps = await getFileTimestampsWithIno(libraryItem.path)
          if (libraryItemDirTimestamps) {
            libraryItem.mtime = libraryItemDirTimestamps.mtimeMs
            libraryItem.ctime = libraryItemDirTimestamps.ctimeMs
            let size = 0
            libraryItem.libraryFiles.forEach((lf) => size += (!isNaN(lf.metadata.size) ? Number(lf.metadata.size) : 0))
            libraryItem.size = size
          }
        }

        libraryScan.addLog(LogLevel.DEBUG, `Success saving abmetadata to "${metadataFilePath}"`)
        return metadataLibraryFile
      })
    const jsonObject = {
      tags: libraryItem.media.tags || [],
      chapters: libraryItem.media.chapters?.map(c => ({ ...c })) || [],
      title: libraryItem.media.title,
      subtitle: libraryItem.media.subtitle,
      authors: libraryItem.media.authors.map(a => a.name),
      narrators: libraryItem.media.narrators,
      series: libraryItem.media.series.map(se => {
        const sequence = se.bookSeries?.sequence || ''
        if (!sequence) return se.name
        return `${se.name} #${sequence}`
      }),
      genres: libraryItem.media.genres || [],
      publishedYear: libraryItem.media.publishedYear,
      publishedDate: libraryItem.media.publishedDate,
      publisher: libraryItem.media.publisher,
      description: libraryItem.media.description,
      isbn: libraryItem.media.isbn,
      asin: libraryItem.media.asin,
      language: libraryItem.media.language,
      explicit: !!libraryItem.media.explicit,
      abridged: !!libraryItem.media.abridged
    }
    return fsExtra.writeFile(metadataFilePath, JSON.stringify(jsonObject, null, 2)).then(async () => {
      // Add metadata.json to libraryFiles array if it is new
      let metadataLibraryFile = libraryItem.libraryFiles.find(lf => lf.metadata.path === filePathToPOSIX(metadataFilePath))
      if (storeMetadataWithItem) {
        if (!metadataLibraryFile) {
          const newLibraryFile = new LibraryFile()
          await newLibraryFile.setDataFromPath(metadataFilePath, `metadata.json`)
          metadataLibraryFile = newLibraryFile.toJSON()
          libraryItem.libraryFiles.push(metadataLibraryFile)
        } else {
          const fileTimestamps = await getFileTimestampsWithIno(metadataFilePath)
          if (fileTimestamps) {
            metadataLibraryFile.metadata.mtimeMs = fileTimestamps.mtimeMs
            metadataLibraryFile.metadata.ctimeMs = fileTimestamps.ctimeMs
            metadataLibraryFile.metadata.size = fileTimestamps.size
            metadataLibraryFile.ino = fileTimestamps.ino
          }
        }
        const libraryItemDirTimestamps = await getFileTimestampsWithIno(libraryItem.path)
        if (libraryItemDirTimestamps) {
          libraryItem.mtime = libraryItemDirTimestamps.mtimeMs
          libraryItem.ctime = libraryItemDirTimestamps.ctimeMs
          let size = 0
          libraryItem.libraryFiles.forEach((lf) => size += (!isNaN(lf.metadata.size) ? Number(lf.metadata.size) : 0))
          libraryItem.size = size
        }
      }

      libraryScan.addLog(LogLevel.DEBUG, `Success saving abmetadata to "${metadataFilePath}"`)

      return metadataLibraryFile
    }).catch((error) => {
      libraryScan.addLog(LogLevel.ERROR, `Failed to save json file at "${metadataFilePath}"`, error)
      return null
    })
  }

  /**
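For reference, a sketch (not from the diff; values are illustrative) of the flattened metadata.json the rewritten save path now writes, versus the old nested shape that wrapped fields in a "metadata" key:

// Old: { "tags": [...], "chapters": [...], "metadata": { "title": "...", ... } }
// New: top-level fields, no "metadata" wrapper
{
  "tags": ["favorite"],
  "chapters": [],
  "title": "Example Title",
  "authors": ["Jane Doe"],
  "series": ["Example Series #1"],
  "explicit": false,
  "abridged": false
}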
server/scanner/LibraryItemScanData.js

@@ -25,7 +25,7 @@ class LibraryItemScanData {
    this.relPath = data.relPath
    /** @type {boolean} */
    this.isFile = data.isFile
    /** @type {{author:string, title:string, subtitle:string, series:string, sequence:string, publishedYear:string, narrators:string}} */
    /** @type {import('../utils/scandir').LibraryItemFilenameMetadata} */
    this.mediaMetadata = data.mediaMetadata
    /** @type {import('../objects/files/LibraryFile')[]} */
    this.libraryFiles = data.libraryFiles
@@ -132,6 +132,11 @@ class LibraryItemScanData {
    return this.libraryFiles.find(lf => lf.metadata.ext.toLowerCase() === '.opf')
  }

  /** @type {LibraryItem.LibraryFileObject} */
  get metadataNfoLibraryFile() {
    return this.libraryFiles.find(lf => lf.metadata.ext.toLowerCase() === '.nfo')
  }

  /**
   *
   * @param {LibraryItem} existingLibraryItem
@@ -233,10 +238,9 @@ class LibraryItemScanData {
      }
      await existingLibraryItem.save()
      return true
    } else {
      libraryScan.addLog(LogLevel.DEBUG, `Library item "${existingLibraryItem.relPath}" is up-to-date`)
      return false
    }

    return false
  }

  /**
@@ -303,5 +307,34 @@ class LibraryItemScanData {

    return !this.ebookLibraryFiles.some(lf => lf.ino === ebookFile.ino)
  }

  /**
   * Set data parsed from filenames
   *
   * @param {Object} bookMetadata
   */
  setBookMetadataFromFilenames(bookMetadata) {
    const keysToMap = ['title', 'subtitle', 'publishedYear', 'asin']
    for (const key in this.mediaMetadata) {
      if (keysToMap.includes(key) && this.mediaMetadata[key]) {
        bookMetadata[key] = this.mediaMetadata[key]
      }
    }

    if (this.mediaMetadata.authors?.length) {
      bookMetadata.authors = this.mediaMetadata.authors
    }
    if (this.mediaMetadata.narrators?.length) {
      bookMetadata.narrators = this.mediaMetadata.narrators
    }
    if (this.mediaMetadata.seriesName) {
      bookMetadata.series = [
        {
          name: this.mediaMetadata.seriesName,
          sequence: this.mediaMetadata.seriesSequence || null
        }
      ]
    }
  }
}
module.exports = LibraryItemScanData
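A minimal sketch (not part of the diff) of what setBookMetadataFromFilenames produces; the mediaMetadata values are hypothetical parses from a folder layout like "Jane Doe/Example Series/1 - Example Title (2020)":

const bookMetadata = {}
libraryItemScanData.setBookMetadataFromFilenames(bookMetadata)
// => { title: 'Example Title', publishedYear: '2020',
//      authors: ['Jane Doe'],
//      series: [{ name: 'Example Series', sequence: '1' }] }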
server/scanner/LibraryItemScanner.js

@@ -21,9 +21,10 @@ class LibraryItemScanner {
   * Scan single library item
   *
   * @param {string} libraryItemId
   * @param {{relPath:string, path:string}} [renamedPaths] used by watcher when item folder was renamed
   * @returns {number} ScanResult
   */
  async scanLibraryItem(libraryItemId) {
  async scanLibraryItem(libraryItemId, renamedPaths = null) {
    // TODO: Add task manager
    const libraryItem = await Database.libraryItemModel.findByPk(libraryItemId)
    if (!libraryItem) {
@@ -50,27 +51,25 @@ class LibraryItemScanner {

    const scanLogger = new ScanLogger()
    scanLogger.verbose = true
    scanLogger.setData('libraryItem', libraryItemId)
    scanLogger.setData('libraryItem', renamedPaths?.relPath || libraryItem.relPath)

    const libraryItemPath = fileUtils.filePathToPOSIX(libraryItem.path)
    const libraryItemPath = renamedPaths?.path || fileUtils.filePathToPOSIX(libraryItem.path)
    const folder = library.libraryFolders[0]
    const libraryItemScanData = await this.getLibraryItemScanData(libraryItemPath, library, folder, false)

    if (await libraryItemScanData.checkLibraryItemData(libraryItem, scanLogger)) {
      if (libraryItemScanData.hasLibraryFileChanges || libraryItemScanData.hasPathChange) {
        const expandedLibraryItem = await this.rescanLibraryItem(libraryItem, libraryItemScanData, library.settings, scanLogger)
        const oldLibraryItem = Database.libraryItemModel.getOldLibraryItem(expandedLibraryItem)
        SocketAuthority.emitter('item_updated', oldLibraryItem.toJSONExpanded())
    let libraryItemDataUpdated = await libraryItemScanData.checkLibraryItemData(libraryItem, scanLogger)

        await this.checkAuthorsAndSeriesRemovedFromBooks(library.id, scanLogger)
      } else {
        // TODO: Temporary while using old model to socket emit
        const oldLibraryItem = await Database.libraryItemModel.getOldById(libraryItem.id)
        SocketAuthority.emitter('item_updated', oldLibraryItem.toJSONExpanded())
      }
    const { libraryItem: expandedLibraryItem, wasUpdated } = await this.rescanLibraryItemMedia(libraryItem, libraryItemScanData, library.settings, scanLogger)
    if (libraryItemDataUpdated || wasUpdated) {
      const oldLibraryItem = Database.libraryItemModel.getOldLibraryItem(expandedLibraryItem)
      SocketAuthority.emitter('item_updated', oldLibraryItem.toJSONExpanded())

      await this.checkAuthorsAndSeriesRemovedFromBooks(library.id, scanLogger)

      return ScanResult.UPDATED
    }

    scanLogger.addLog(LogLevel.DEBUG, `Library item is up-to-date`)
    return ScanResult.UPTODATE
  }
@@ -156,16 +155,14 @@ class LibraryItemScanner {
   * @param {LibraryItemScanData} libraryItemData
   * @param {import('../models/Library').LibrarySettingsObject} librarySettings
   * @param {LibraryScan} libraryScan
   * @returns {Promise<LibraryItem>}
   * @returns {Promise<{libraryItem:LibraryItem, wasUpdated:boolean}>}
   */
  async rescanLibraryItem(existingLibraryItem, libraryItemData, librarySettings, libraryScan) {
    let newLibraryItem = null
  rescanLibraryItemMedia(existingLibraryItem, libraryItemData, librarySettings, libraryScan) {
    if (existingLibraryItem.mediaType === 'book') {
      newLibraryItem = await BookScanner.rescanExistingBookLibraryItem(existingLibraryItem, libraryItemData, librarySettings, libraryScan)
      return BookScanner.rescanExistingBookLibraryItem(existingLibraryItem, libraryItemData, librarySettings, libraryScan)
    } else {
      newLibraryItem = await PodcastScanner.rescanExistingPodcastLibraryItem(existingLibraryItem, libraryItemData, librarySettings, libraryScan)
      return PodcastScanner.rescanExistingPodcastLibraryItem(existingLibraryItem, libraryItemData, librarySettings, libraryScan)
    }
    return newLibraryItem
  }

  /**
server/scanner/LibraryScan.js

@@ -6,7 +6,7 @@ const date = require('../libs/dateAndTime')
const Logger = require('../Logger')
const Library = require('../objects/Library')
const { LogLevel } = require('../utils/constants')
const { secondsToTimestamp } = require('../utils/index')
const { secondsToTimestamp, elapsedPretty } = require('../utils/index')

class LibraryScan {
  constructor() {
@@ -67,6 +67,15 @@ class LibraryScan {
  get logFilename() {
    return date.format(new Date(), 'YYYY-MM-DD') + '_' + this.id + '.txt'
  }
  get scanResultsString() {
    if (this.error) return this.error
    const strs = []
    if (this.resultsAdded) strs.push(`${this.resultsAdded} added`)
    if (this.resultsUpdated) strs.push(`${this.resultsUpdated} updated`)
    if (this.resultsMissing) strs.push(`${this.resultsMissing} missing`)
    if (!strs.length) return `Everything was up to date (${elapsedPretty(this.elapsed / 1000)})`
    return strs.join(', ') + ` (${elapsedPretty(this.elapsed / 1000)})`
  }

  toJSON() {
    return {
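Example outputs (not from the diff) of the new scanResultsString getter; the exact elapsed suffix depends on how elapsedPretty renders the duration:

// resultsAdded=2, resultsUpdated=1, resultsMissing=0
scan.scanResultsString // => e.g. '2 added, 1 updated (1 minute)'
// nothing changed
scan.scanResultsString // => e.g. 'Everything was up to date (1 minute)'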
server/scanner/LibraryScanner.js

@@ -9,9 +9,11 @@ const fileUtils = require('../utils/fileUtils')
const scanUtils = require('../utils/scandir')
const { LogLevel, ScanResult } = require('../utils/constants')
const libraryFilters = require('../utils/queries/libraryFilters')
const TaskManager = require('../managers/TaskManager')
const LibraryItemScanner = require('./LibraryItemScanner')
const LibraryScan = require('./LibraryScan')
const LibraryItemScanData = require('./LibraryItemScanData')
const Task = require('../objects/Task')

class LibraryScanner {
  constructor() {
@@ -19,7 +21,7 @@ class LibraryScanner {
    this.librariesScanning = []

    this.scanningFilesChanged = false
    /** @type {import('../Watcher').PendingFileUpdate[][]} */
    /** @type {[import('../Watcher').PendingFileUpdate[], Task][]} */
    this.pendingFileUpdatesToScan = []
  }
@@ -44,48 +46,69 @@ class LibraryScanner {
  /**
   *
   * @param {import('../objects/Library')} library
   * @param {*} options
   * @param {boolean} [forceRescan]
   */
  async scan(library, options = {}) {
  async scan(library, forceRescan = false) {
    if (this.isLibraryScanning(library.id)) {
      Logger.error(`[Scanner] Already scanning ${library.id}`)
      Logger.error(`[LibraryScanner] Already scanning ${library.id}`)
      return
    }

    if (!library.folders.length) {
      Logger.warn(`[Scanner] Library has no folders to scan "${library.name}"`)
      Logger.warn(`[LibraryScanner] Library has no folders to scan "${library.name}"`)
      return
    }

    if (library.isBook && library.settings.metadataPrecedence.join() !== library.lastScanMetadataPrecedence?.join()) {
      const lastScanMetadataPrecedence = library.lastScanMetadataPrecedence?.join() || 'Unset'
      Logger.info(`[LibraryScanner] Library metadata precedence changed since last scan. From [${lastScanMetadataPrecedence}] to [${library.settings.metadataPrecedence.join()}]`)
      forceRescan = true
    }

    const libraryScan = new LibraryScan()
    libraryScan.setData(library)
    libraryScan.verbose = true
    this.librariesScanning.push(libraryScan.getScanEmitData)

    SocketAuthority.emitter('scan_start', libraryScan.getScanEmitData)
    const taskData = {
      libraryId: library.id,
      libraryName: library.name,
      libraryMediaType: library.mediaType
    }
    const task = TaskManager.createAndAddTask('library-scan', `Scanning "${library.name}" library`, null, true, taskData)

    Logger.info(`[Scanner] Starting library scan ${libraryScan.id} for ${libraryScan.libraryName}`)
    Logger.info(`[LibraryScanner] Starting${forceRescan ? ' (forced)' : ''} library scan ${libraryScan.id} for ${libraryScan.libraryName}`)

    const canceled = await this.scanLibrary(libraryScan)
    const canceled = await this.scanLibrary(libraryScan, forceRescan)

    if (canceled) {
      Logger.info(`[Scanner] Library scan canceled for "${libraryScan.libraryName}"`)
      Logger.info(`[LibraryScanner] Library scan canceled for "${libraryScan.libraryName}"`)
      delete this.cancelLibraryScan[libraryScan.libraryId]
    }

    libraryScan.setComplete()

    Logger.info(`[Scanner] Library scan ${libraryScan.id} completed in ${libraryScan.elapsedTimestamp} | ${libraryScan.resultStats}`)
    Logger.info(`[LibraryScanner] Library scan ${libraryScan.id} completed in ${libraryScan.elapsedTimestamp} | ${libraryScan.resultStats}`)
    this.librariesScanning = this.librariesScanning.filter(ls => ls.id !== library.id)

    if (canceled && !libraryScan.totalResults) {
      task.setFinished('Scan canceled')
      TaskManager.taskFinished(task)

      const emitData = libraryScan.getScanEmitData
      emitData.results = null
      SocketAuthority.emitter('scan_complete', emitData)
      return
    }

    SocketAuthority.emitter('scan_complete', libraryScan.getScanEmitData)
    library.lastScan = Date.now()
    library.lastScanVersion = packageJson.version
    if (library.isBook) {
      library.lastScanMetadataPrecedence = library.settings.metadataPrecedence
    }
    await Database.libraryModel.updateFromOld(library)

    task.setFinished(libraryScan.scanResultsString)
    TaskManager.taskFinished(task)

    if (libraryScan.totalResults) {
      libraryScan.saveLog()
@@ -95,9 +118,10 @@ class LibraryScanner {
  /**
   *
   * @param {import('./LibraryScan')} libraryScan
   * @returns {boolean} true if scan canceled
   * @param {boolean} forceRescan
   * @returns {Promise<boolean>} true if scan canceled
   */
  async scanLibrary(libraryScan) {
  async scanLibrary(libraryScan, forceRescan) {
    // Make sure library filter data is set
    // this is used to check for existing authors & series
    await libraryFilters.getFilterData(libraryScan.library.mediaType, libraryScan.libraryId)
@@ -155,17 +179,25 @@ class LibraryScanner {
        }
      } else {
        libraryItemDataFound = libraryItemDataFound.filter(lidf => lidf !== libraryItemData)
        if (await libraryItemData.checkLibraryItemData(existingLibraryItem, libraryScan)) {
          libraryScan.resultsUpdated++
          if (libraryItemData.hasLibraryFileChanges || libraryItemData.hasPathChange) {
            const libraryItem = await LibraryItemScanner.rescanLibraryItem(existingLibraryItem, libraryItemData, libraryScan.library.settings, libraryScan)
            const oldLibraryItem = Database.libraryItemModel.getOldLibraryItem(libraryItem)
            oldLibraryItemsUpdated.push(oldLibraryItem)
        let libraryItemDataUpdated = await libraryItemData.checkLibraryItemData(existingLibraryItem, libraryScan)
        if (libraryItemDataUpdated || forceRescan) {
          if (forceRescan || libraryItemData.hasLibraryFileChanges || libraryItemData.hasPathChange) {
            const { libraryItem, wasUpdated } = await LibraryItemScanner.rescanLibraryItemMedia(existingLibraryItem, libraryItemData, libraryScan.library.settings, libraryScan)
            if (!forceRescan || wasUpdated) {
              libraryScan.resultsUpdated++
              const oldLibraryItem = Database.libraryItemModel.getOldLibraryItem(libraryItem)
              oldLibraryItemsUpdated.push(oldLibraryItem)
            } else {
              libraryScan.addLog(LogLevel.DEBUG, `Library item "${existingLibraryItem.relPath}" is up-to-date`)
            }
          } else {
            libraryScan.resultsUpdated++
            // TODO: Temporary while using old model to socket emit
            const oldLibraryItem = await Database.libraryItemModel.getOldById(existingLibraryItem.id)
            oldLibraryItemsUpdated.push(oldLibraryItem)
          }
        } else {
          libraryScan.addLog(LogLevel.DEBUG, `Library item "${existingLibraryItem.relPath}" is up-to-date`)
        }
      }
@@ -304,18 +336,25 @@ class LibraryScanner {
  /**
   * Scan files changed from Watcher
   * @param {import('../Watcher').PendingFileUpdate[]} fileUpdates
   * @param {Task} pendingTask
   */
  async scanFilesChanged(fileUpdates) {
  async scanFilesChanged(fileUpdates, pendingTask) {
    if (!fileUpdates?.length) return

    // If already scanning files from watcher then add these updates to queue
    if (this.scanningFilesChanged) {
      this.pendingFileUpdatesToScan.push(fileUpdates)
      this.pendingFileUpdatesToScan.push([fileUpdates, pendingTask])
      Logger.debug(`[LibraryScanner] Already scanning files from watcher - file updates pushed to queue (size ${this.pendingFileUpdatesToScan.length})`)
      return
    }
    this.scanningFilesChanged = true

    const results = {
      added: 0,
      updated: 0,
      removed: 0
    }

    // files grouped by folder
    const folderGroups = this.getFileUpdatesGrouped(fileUpdates)
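A small sketch (not part of the diff) of the queue shape change: each pending entry now pairs the file updates with the Task created for them, so the task can be finished when its batch is eventually scanned:

// Before: this.pendingFileUpdatesToScan = [fileUpdates1, fileUpdates2]
// After:  this.pendingFileUpdatesToScan = [[fileUpdates1, task1], [fileUpdates2, task2]]
const next = this.pendingFileUpdatesToScan.shift()
if (next) this.scanFilesChanged(...next) // spreads back into (fileUpdates, pendingTask)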
@@ -346,17 +385,42 @@ class LibraryScanner {
      const folderScanResults = await this.scanFolderUpdates(library, folder, fileUpdateGroup)
      Logger.debug(`[LibraryScanner] Folder scan results`, folderScanResults)

      // Tally results to share with client
      let resetFilterData = false
      Object.values(folderScanResults).forEach((scanResult) => {
        if (scanResult === ScanResult.ADDED) {
          resetFilterData = true
          results.added++
        } else if (scanResult === ScanResult.REMOVED) {
          resetFilterData = true
          results.removed++
        } else if (scanResult === ScanResult.UPDATED) {
          resetFilterData = true
          results.updated++
        }
      })

      // If something was updated then reset numIssues filter data for library
      if (Object.values(folderScanResults).some(scanResult => scanResult !== ScanResult.NOTHING && scanResult !== ScanResult.UPTODATE)) {
      if (resetFilterData) {
        await Database.resetLibraryIssuesFilterData(libraryId)
      }
    }

    // Complete task and send results to client
    const resultStrs = []
    if (results.added) resultStrs.push(`${results.added} added`)
    if (results.updated) resultStrs.push(`${results.updated} updated`)
    if (results.removed) resultStrs.push(`${results.removed} missing`)
    let scanResultStr = 'Scan finished with no changes'
    if (resultStrs.length) scanResultStr = resultStrs.join(', ')
    pendingTask.setFinished(scanResultStr)
    TaskManager.taskFinished(pendingTask)

    this.scanningFilesChanged = false

    if (this.pendingFileUpdatesToScan.length) {
      Logger.debug(`[LibraryScanner] File updates finished scanning with more updates in queue (${this.pendingFileUpdatesToScan.length})`)
      this.scanFilesChanged(this.pendingFileUpdatesToScan.shift())
      this.scanFilesChanged(...this.pendingFileUpdatesToScan.shift())
    }
  }
@@ -399,7 +463,7 @@ class LibraryScanner {
    // Test Case: Moving audio files from library item folder to author folder should trigger a re-scan of the item
    const updateGroup = { ...fileUpdateGroup }
    for (const itemDir in updateGroup) {
      if (itemDir == fileUpdateGroup[itemDir]) continue // Media in root path
      if (isSingleMediaFile(fileUpdateGroup, itemDir)) continue // Media in root path

      const itemDirNestedFiles = fileUpdateGroup[itemDir].filter(b => b.includes('/'))
      if (!itemDirNestedFiles.length) continue
@@ -461,6 +525,7 @@ class LibraryScanner {
        path: potentialChildDirs
      })

      let renamedPaths = {}
      if (!existingLibraryItem) {
        const dirIno = await fileUtils.getIno(fullPath)
        existingLibraryItem = await Database.libraryItemModel.findOneOld({
@@ -471,6 +536,8 @@ class LibraryScanner {
          // Update library item paths for scan
          existingLibraryItem.path = fullPath
          existingLibraryItem.relPath = itemDir
          renamedPaths.path = fullPath
          renamedPaths.relPath = itemDir
        }
      }
      if (existingLibraryItem) {
@@ -490,9 +557,9 @@ class LibraryScanner {

        // Scan library item for updates
        Logger.debug(`[LibraryScanner] Folder update for relative path "${itemDir}" is in library item "${existingLibraryItem.media.metadata.title}" - scan for updates`)
        itemGroupingResults[itemDir] = await LibraryItemScanner.scanLibraryItem(existingLibraryItem.id)
        itemGroupingResults[itemDir] = await LibraryItemScanner.scanLibraryItem(existingLibraryItem.id, renamedPaths)
        continue
      } else if (library.settings.audiobooksOnly && !fileUpdateGroup[itemDir].some?.(scanUtils.checkFilepathIsAudioFile)) {
      } else if (library.settings.audiobooksOnly && !hasAudioFiles(fileUpdateGroup, itemDir)) {
        Logger.debug(`[LibraryScanner] Folder update for relative path "${itemDir}" has no audio files`)
        continue
      }
@@ -513,7 +580,7 @@ class LibraryScanner {
      }

      Logger.debug(`[LibraryScanner] Folder update group must be a new item "${itemDir}" in library "${library.name}"`)
      const isSingleMediaItem = itemDir === fileUpdateGroup[itemDir]
      const isSingleMediaItem = isSingleMediaFile(fileUpdateGroup, itemDir)
      const newLibraryItem = await LibraryItemScanner.scanPotentialNewLibraryItem(fullPath, library, folder, isSingleMediaItem)
      if (newLibraryItem) {
        const oldNewLibraryItem = Database.libraryItemModel.getOldLibraryItem(newLibraryItem)
@@ -525,4 +592,14 @@ class LibraryScanner {
    return itemGroupingResults
  }
}
module.exports = new LibraryScanner()
module.exports = new LibraryScanner()

function hasAudioFiles(fileUpdateGroup, itemDir) {
  return isSingleMediaFile(fileUpdateGroup, itemDir) ?
    scanUtils.checkFilepathIsAudioFile(fileUpdateGroup[itemDir]) :
    fileUpdateGroup[itemDir].some(scanUtils.checkFilepathIsAudioFile)
}

function isSingleMediaFile(fileUpdateGroup, itemDir) {
  return itemDir === fileUpdateGroup[itemDir]
}
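A quick illustration (not part of the diff) of the two new helpers: fileUpdateGroup maps a relative item dir either to an array of file paths or, for a single root-level media file, to the path string itself, so the key-equals-value check distinguishes the cases:

// Grouped item folder:
hasAudioFiles({ 'Book A': ['Book A/ch1.mp3', 'Book A/cover.jpg'] }, 'Book A') // => true
// Single media file at the library root (key equals value):
isSingleMediaFile({ 'book.m4b': 'book.m4b' }, 'book.m4b') // => true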
48 server/scanner/NfoFileScanner.js Normal file

@@ -0,0 +1,48 @@
const { parseNfoMetadata } = require('../utils/parsers/parseNfoMetadata')
const { readTextFile } = require('../utils/fileUtils')

class NfoFileScanner {
  constructor() { }

  /**
   * Parse metadata from .nfo file found in library scan and update bookMetadata
   *
   * @param {import('../models/LibraryItem').LibraryFileObject} nfoLibraryFileObj
   * @param {Object} bookMetadata
   */
  async scanBookNfoFile(nfoLibraryFileObj, bookMetadata) {
    const nfoText = await readTextFile(nfoLibraryFileObj.metadata.path)
    const nfoMetadata = nfoText ? await parseNfoMetadata(nfoText) : null
    if (nfoMetadata) {
      for (const key in nfoMetadata) {
        if (key === 'tags') { // Add tags only if tags are empty
          if (nfoMetadata.tags.length) {
            bookMetadata.tags = nfoMetadata.tags
          }
        } else if (key === 'genres') { // Add genres only if genres are empty
          if (nfoMetadata.genres.length) {
            bookMetadata.genres = nfoMetadata.genres
          }
        } else if (key === 'authors') {
          if (nfoMetadata.authors?.length) {
            bookMetadata.authors = nfoMetadata.authors
          }
        } else if (key === 'narrators') {
          if (nfoMetadata.narrators?.length) {
            bookMetadata.narrators = nfoMetadata.narrators
          }
        } else if (key === 'series') {
          if (nfoMetadata.series) {
            bookMetadata.series = [{
              name: nfoMetadata.series,
              sequence: nfoMetadata.sequence || null
            }]
          }
        } else if (nfoMetadata[key] && key !== 'sequence') {
          bookMetadata[key] = nfoMetadata[key]
        }
      }
    }
  }
}
module.exports = new NfoFileScanner()
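
// A usage sketch for the new scanner (the file paths and metadata values here
// are hypothetical; the object shapes mirror the JSDoc annotations above):
const NfoFileScanner = require('./server/scanner/NfoFileScanner')

const bookMetadata = { title: 'Placeholder Title', tags: [], genres: [] }
const nfoLibraryFileObj = { metadata: { path: '/audiobooks/Author/Book/metadata.nfo' } }

// Parsed .nfo fields are merged onto bookMetadata in place. Note that 'sequence'
// is never copied directly - it is folded into the series entry as [{ name, sequence }].
NfoFileScanner.scanBookNfoFile(nfoLibraryFileObj, bookMetadata)
  .then(() => console.log(bookMetadata))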

45 server/scanner/OpfFileScanner.js Normal file
@@ -0,0 +1,45 @@
const { parseOpfMetadataXML } = require('../utils/parsers/parseOpfMetadata')
const { readTextFile } = require('../utils/fileUtils')

class OpfFileScanner {
  constructor() { }

  /**
   * Parse metadata from .opf file found in library scan and update bookMetadata
   *
   * @param {import('../models/LibraryItem').LibraryFileObject} opfLibraryFileObj
   * @param {Object} bookMetadata
   */
  async scanBookOpfFile(opfLibraryFileObj, bookMetadata) {
    const xmlText = await readTextFile(opfLibraryFileObj.metadata.path)
    const opfMetadata = xmlText ? await parseOpfMetadataXML(xmlText) : null
    if (opfMetadata) {
      for (const key in opfMetadata) {
        if (key === 'tags') { // Add tags only if tags are empty
          if (opfMetadata.tags.length) {
            bookMetadata.tags = opfMetadata.tags
          }
        } else if (key === 'genres') { // Add genres only if genres are empty
          if (opfMetadata.genres.length) {
            bookMetadata.genres = opfMetadata.genres
          }
        } else if (key === 'authors') {
          if (opfMetadata.authors?.length) {
            bookMetadata.authors = opfMetadata.authors
          }
        } else if (key === 'narrators') {
          if (opfMetadata.narrators?.length) {
            bookMetadata.narrators = opfMetadata.narrators
          }
        } else if (key === 'series') {
          if (opfMetadata.series?.length) {
            bookMetadata.series = opfMetadata.series
          }
        } else if (opfMetadata[key] && key !== 'sequence') {
          bookMetadata[key] = opfMetadata[key]
        }
      }
    }
  }
}
module.exports = new OpfFileScanner()

@@ -2,15 +2,15 @@ const uuidv4 = require("uuid").v4
const Path = require('path')
const { LogLevel } = require('../utils/constants')
const { getTitleIgnorePrefix } = require('../utils/index')
const abmetadataGenerator = require('../utils/generators/abmetadataGenerator')
const AudioFileScanner = require('./AudioFileScanner')
const Database = require('../Database')
const { readTextFile, filePathToPOSIX, getFileTimestampsWithIno } = require('../utils/fileUtils')
const { filePathToPOSIX, getFileTimestampsWithIno } = require('../utils/fileUtils')
const AudioFile = require('../objects/files/AudioFile')
const CoverManager = require('../managers/CoverManager')
const LibraryFile = require('../objects/files/LibraryFile')
const fsExtra = require("../libs/fsExtra")
const PodcastEpisode = require("../models/PodcastEpisode")
const AbsMetadataFileScanner = require("./AbsMetadataFileScanner")

/**
 * Metadata for podcasts pulled from files

@@ -39,7 +39,7 @@ class PodcastScanner {
   * @param {import('./LibraryItemScanData')} libraryItemData
   * @param {import('../models/Library').LibrarySettingsObject} librarySettings
   * @param {import('./LibraryScan')} libraryScan
   * @returns {Promise<import('../models/LibraryItem')>}
   * @returns {Promise<{libraryItem:import('../models/LibraryItem'), wasUpdated:boolean}>}
   */
  async rescanExistingPodcastLibraryItem(existingLibraryItem, libraryItemData, librarySettings, libraryScan) {
    /** @type {import('../models/Podcast')} */

@@ -87,7 +87,7 @@ class PodcastScanner {
          podcastEpisode.changed('audioFile', true)

          // Set metadata and save episode
          this.setPodcastEpisodeMetadataFromAudioFile(podcastEpisode, libraryScan)
          AudioFileScanner.setPodcastEpisodeMetadataFromAudioMetaTags(podcastEpisode, libraryScan)
          libraryScan.addLog(LogLevel.INFO, `Podcast episode "${podcastEpisode.title}" keys changed [${podcastEpisode.changed()?.join(', ')}]`)
          await podcastEpisode.save()
        }

@@ -122,7 +122,7 @@ class PodcastScanner {
        }
        const newPodcastEpisode = Database.podcastEpisodeModel.build(newEpisode)
        // Set metadata and save new episode
        this.setPodcastEpisodeMetadataFromAudioFile(newPodcastEpisode, libraryScan)
        AudioFileScanner.setPodcastEpisodeMetadataFromAudioMetaTags(newPodcastEpisode, libraryScan)
        libraryScan.addLog(LogLevel.INFO, `New Podcast episode "${newPodcastEpisode.title}" added`)
        await newPodcastEpisode.save()
        existingPodcastEpisodes.push(newPodcastEpisode)

@@ -201,7 +201,10 @@ class PodcastScanner {
      await existingLibraryItem.save()
    }

    return existingLibraryItem
    return {
      libraryItem: existingLibraryItem,
      wasUpdated: hasMediaChanges || libraryItemUpdated
    }
  }

  /**

@@ -239,7 +242,7 @@ class PodcastScanner {
      }

      // Set metadata and save new episode
      this.setPodcastEpisodeMetadataFromAudioFile(newEpisode, libraryScan)
      AudioFileScanner.setPodcastEpisodeMetadataFromAudioMetaTags(newEpisode, libraryScan)
      libraryScan.addLog(LogLevel.INFO, `New Podcast episode "${newEpisode.title}" found`)
      newPodcastEpisodes.push(newEpisode)
    }

@@ -317,7 +320,7 @@ class PodcastScanner {
  async getPodcastMetadataFromScanData(podcastEpisodes, libraryItemData, libraryScan, existingLibraryItemId = null) {
    const podcastMetadata = {
      title: libraryItemData.mediaMetadata.title,
      titleIgnorePrefix: getTitleIgnorePrefix(libraryItemData.mediaMetadata.title),
      titleIgnorePrefix: undefined,
      author: undefined,
      releaseDate: undefined,
      feedURL: undefined,

@@ -333,133 +336,19 @@ class PodcastScanner {
      genres: []
    }

    // Use audio meta tags
    if (podcastEpisodes.length) {
      const audioFileMetaTags = podcastEpisodes[0].audioFile.metaTags
      const overrideExistingDetails = Database.serverSettings.scannerPreferAudioMetadata

      const MetadataMapArray = [
        {
          tag: 'tagAlbum',
          altTag: 'tagSeries',
          key: 'title'
        },
        {
          tag: 'tagArtist',
          key: 'author'
        },
        {
          tag: 'tagGenre',
          key: 'genres'
        },
        {
          tag: 'tagLanguage',
          key: 'language'
        },
        {
          tag: 'tagItunesId',
          key: 'itunesId'
        },
        {
          tag: 'tagPodcastType',
          key: 'podcastType',
        }
      ]

      MetadataMapArray.forEach((mapping) => {
        let value = audioFileMetaTags[mapping.tag]
        let tagToUse = mapping.tag
        if (!value && mapping.altTag) {
          value = audioFileMetaTags[mapping.altTag]
          tagToUse = mapping.altTag
        }

        if (value && typeof value === 'string') {
          value = value.trim() // Trim whitespace

          if (mapping.key === 'genres' && (!podcastMetadata.genres.length || overrideExistingDetails)) {
            podcastMetadata.genres = this.parseGenresString(value)
            libraryScan.addLog(LogLevel.DEBUG, `Mapping metadata to key ${tagToUse} => ${mapping.key}: ${podcastMetadata.genres.join(', ')}`)
          } else if (!podcastMetadata[mapping.key] || overrideExistingDetails) {
            podcastMetadata[mapping.key] = value
            libraryScan.addLog(LogLevel.DEBUG, `Mapping metadata to key ${tagToUse} => ${mapping.key}: ${podcastMetadata[mapping.key]}`)
          }
        }
      })
      AudioFileScanner.setPodcastMetadataFromAudioMetaTags(podcastEpisodes[0].audioFile, podcastMetadata, libraryScan)
    }

    // If metadata.json or metadata.abs use this for metadata
    const metadataLibraryFile = libraryItemData.metadataJsonLibraryFile || libraryItemData.metadataAbsLibraryFile
    let metadataText = metadataLibraryFile ? await readTextFile(metadataLibraryFile.metadata.path) : null
    let metadataFilePath = metadataLibraryFile?.metadata.path
    let metadataFileFormat = libraryItemData.metadataJsonLibraryFile ? 'json' : 'abs'

    // When metadata file is not stored with library item then check in the /metadata/items folder for it
    if (!metadataText && existingLibraryItemId) {
      let metadataPath = Path.join(global.MetadataPath, 'items', existingLibraryItemId)

      let altFormat = global.ServerSettings.metadataFileFormat === 'json' ? 'abs' : 'json'
      // First check the metadata format set in server settings, fallback to the alternate
      metadataFilePath = Path.join(metadataPath, `metadata.${global.ServerSettings.metadataFileFormat}`)
      metadataFileFormat = global.ServerSettings.metadataFileFormat
      if (await fsExtra.pathExists(metadataFilePath)) {
        metadataText = await readTextFile(metadataFilePath)
      } else if (await fsExtra.pathExists(Path.join(metadataPath, `metadata.${altFormat}`))) {
        metadataFilePath = Path.join(metadataPath, `metadata.${altFormat}`)
        metadataFileFormat = altFormat
        metadataText = await readTextFile(metadataFilePath)
      }
    }

    if (metadataText) {
      libraryScan.addLog(LogLevel.INFO, `Found metadata file "${metadataFilePath}" - preferring`)
      let abMetadata = null
      if (metadataFileFormat === 'json') {
        abMetadata = abmetadataGenerator.parseJson(metadataText)
      } else {
        abMetadata = abmetadataGenerator.parse(metadataText, 'podcast')
      }

      if (abMetadata) {
        if (abMetadata.tags?.length) {
          podcastMetadata.tags = abMetadata.tags
        }
        for (const key in abMetadata.metadata) {
          if (abMetadata.metadata[key] === undefined) continue

          // TODO: New podcast model changed some keys, need to update the abmetadataGenerator
          let newModelKey = key
          if (key === 'feedUrl') newModelKey = 'feedURL'
          else if (key === 'imageUrl') newModelKey = 'imageURL'
          else if (key === 'itunesPageUrl') newModelKey = 'itunesPageURL'
          else if (key === 'type') newModelKey = 'podcastType'

          podcastMetadata[newModelKey] = abMetadata.metadata[key]
        }
      }
    }
    // Use metadata.json file
    await AbsMetadataFileScanner.scanPodcastMetadataFile(libraryScan, libraryItemData, podcastMetadata, existingLibraryItemId)

    podcastMetadata.titleIgnorePrefix = getTitleIgnorePrefix(podcastMetadata.title)

    return podcastMetadata
  }

  /**
   * Parse a genre string into multiple genres
   * @example "Fantasy;Sci-Fi;History" => ["Fantasy", "Sci-Fi", "History"]
   * @param {string} genreTag
   * @returns {string[]}
   */
  parseGenresString(genreTag) {
    if (!genreTag?.length) return []
    const separators = ['/', '//', ';']
    for (let i = 0; i < separators.length; i++) {
      if (genreTag.includes(separators[i])) {
        return genreTag.split(separators[i]).map(genre => genre.trim()).filter(g => !!g)
      }
    }
    return [genreTag]
  }
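
// parseGenresString in practice (inputs are illustrative):
const PodcastScanner = require('./server/scanner/PodcastScanner')

PodcastScanner.parseGenresString('Fantasy;Sci-Fi;History') // => ['Fantasy', 'Sci-Fi', 'History']
PodcastScanner.parseGenresString('True Crime / Comedy')    // => ['True Crime', 'Comedy']
PodcastScanner.parseGenresString('Drama')                  // => ['Drama'] (no separator found)
// Note: since '/' is checked before '//', the '//' separator can never match.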

  /**
   *
   * @param {import('../models/LibraryItem')} libraryItem
@@ -477,190 +366,59 @@ class PodcastScanner {
      await fsExtra.ensureDir(metadataPath)
    }

    const metadataFileFormat = global.ServerSettings.metadataFileFormat
    const metadataFilePath = Path.join(metadataPath, `metadata.${metadataFileFormat}`)
    if (metadataFileFormat === 'json') {
      // Remove metadata.abs if it exists
      if (await fsExtra.pathExists(Path.join(metadataPath, `metadata.abs`))) {
        libraryScan.addLog(LogLevel.DEBUG, `Removing metadata.abs for item "${libraryItem.media.title}"`)
        await fsExtra.remove(Path.join(metadataPath, `metadata.abs`))
        libraryItem.libraryFiles = libraryItem.libraryFiles.filter(lf => lf.metadata.path !== filePathToPOSIX(Path.join(metadataPath, `metadata.abs`)))
      }
    const metadataFilePath = Path.join(metadataPath, `metadata.${global.ServerSettings.metadataFileFormat}`)

      // TODO: Update to not use `metadata` so it fits the updated model
      const jsonObject = {
        tags: libraryItem.media.tags || [],
        metadata: {
          title: libraryItem.media.title,
          author: libraryItem.media.author,
          description: libraryItem.media.description,
          releaseDate: libraryItem.media.releaseDate,
          genres: libraryItem.media.genres || [],
          feedUrl: libraryItem.media.feedURL,
          imageUrl: libraryItem.media.imageURL,
          itunesPageUrl: libraryItem.media.itunesPageURL,
          itunesId: libraryItem.media.itunesId,
          itunesArtistId: libraryItem.media.itunesArtistId,
          asin: libraryItem.media.asin,
          language: libraryItem.media.language,
          explicit: !!libraryItem.media.explicit,
          type: libraryItem.media.podcastType
        }
      }
      return fsExtra.writeFile(metadataFilePath, JSON.stringify(jsonObject, null, 2)).then(async () => {
        // Add metadata.json to libraryFiles array if it is new
        let metadataLibraryFile = libraryItem.libraryFiles.find(lf => lf.metadata.path === filePathToPOSIX(metadataFilePath))
        if (storeMetadataWithItem) {
          if (!metadataLibraryFile) {
            const newLibraryFile = new LibraryFile()
            await newLibraryFile.setDataFromPath(metadataFilePath, `metadata.json`)
            metadataLibraryFile = newLibraryFile.toJSON()
            libraryItem.libraryFiles.push(metadataLibraryFile)
          } else {
            const fileTimestamps = await getFileTimestampsWithIno(metadataFilePath)
            if (fileTimestamps) {
              metadataLibraryFile.metadata.mtimeMs = fileTimestamps.mtimeMs
              metadataLibraryFile.metadata.ctimeMs = fileTimestamps.ctimeMs
              metadataLibraryFile.metadata.size = fileTimestamps.size
              metadataLibraryFile.ino = fileTimestamps.ino
            }
          }
          const libraryItemDirTimestamps = await getFileTimestampsWithIno(libraryItem.path)
          if (libraryItemDirTimestamps) {
            libraryItem.mtime = libraryItemDirTimestamps.mtimeMs
            libraryItem.ctime = libraryItemDirTimestamps.ctimeMs
            let size = 0
            libraryItem.libraryFiles.forEach((lf) => size += (!isNaN(lf.metadata.size) ? Number(lf.metadata.size) : 0))
            libraryItem.size = size
          }
        }

        libraryScan.addLog(LogLevel.DEBUG, `Success saving abmetadata to "${metadataFilePath}"`)

        return metadataLibraryFile
      }).catch((error) => {
        libraryScan.addLog(LogLevel.ERROR, `Failed to save json file at "${metadataFilePath}"`, error)
        return null
      })
    } else {
      // Remove metadata.json if it exists
      if (await fsExtra.pathExists(Path.join(metadataPath, `metadata.json`))) {
        libraryScan.addLog(LogLevel.DEBUG, `Removing metadata.json for item "${libraryItem.media.title}"`)
        await fsExtra.remove(Path.join(metadataPath, `metadata.json`))
        libraryItem.libraryFiles = libraryItem.libraryFiles.filter(lf => lf.metadata.path !== filePathToPOSIX(Path.join(metadataPath, `metadata.json`)))
      }

      return abmetadataGenerator.generateFromNewModel(libraryItem, metadataFilePath).then(async (success) => {
        if (!success) {
          libraryScan.addLog(LogLevel.ERROR, `Failed saving abmetadata to "${metadataFilePath}"`)
          return null
        }
        // Add metadata.abs to libraryFiles array if it is new
        let metadataLibraryFile = libraryItem.libraryFiles.find(lf => lf.metadata.path === filePathToPOSIX(metadataFilePath))
        if (storeMetadataWithItem) {
          if (!metadataLibraryFile) {
            const newLibraryFile = new LibraryFile()
            await newLibraryFile.setDataFromPath(metadataFilePath, `metadata.abs`)
            metadataLibraryFile = newLibraryFile.toJSON()
            libraryItem.libraryFiles.push(metadataLibraryFile)
          } else {
            const fileTimestamps = await getFileTimestampsWithIno(metadataFilePath)
            if (fileTimestamps) {
              metadataLibraryFile.metadata.mtimeMs = fileTimestamps.mtimeMs
              metadataLibraryFile.metadata.ctimeMs = fileTimestamps.ctimeMs
              metadataLibraryFile.metadata.size = fileTimestamps.size
              metadataLibraryFile.ino = fileTimestamps.ino
            }
          }
          const libraryItemDirTimestamps = await getFileTimestampsWithIno(libraryItem.path)
          if (libraryItemDirTimestamps) {
            libraryItem.mtime = libraryItemDirTimestamps.mtimeMs
            libraryItem.ctime = libraryItemDirTimestamps.ctimeMs
            let size = 0
            libraryItem.libraryFiles.forEach((lf) => size += (!isNaN(lf.metadata.size) ? Number(lf.metadata.size) : 0))
            libraryItem.size = size
          }
        }

        libraryScan.addLog(LogLevel.DEBUG, `Success saving abmetadata to "${metadataFilePath}"`)
        return metadataLibraryFile
      })
    const jsonObject = {
      tags: libraryItem.media.tags || [],
      title: libraryItem.media.title,
      author: libraryItem.media.author,
      description: libraryItem.media.description,
      releaseDate: libraryItem.media.releaseDate,
      genres: libraryItem.media.genres || [],
      feedURL: libraryItem.media.feedURL,
      imageURL: libraryItem.media.imageURL,
      itunesPageURL: libraryItem.media.itunesPageURL,
      itunesId: libraryItem.media.itunesId,
      itunesArtistId: libraryItem.media.itunesArtistId,
      asin: libraryItem.media.asin,
      language: libraryItem.media.language,
      explicit: !!libraryItem.media.explicit,
      podcastType: libraryItem.media.podcastType
    }
  }

  /**
   *
   * @param {PodcastEpisode} podcastEpisode Not the model when creating new podcast
   * @param {import('./ScanLogger')} scanLogger
   */
  setPodcastEpisodeMetadataFromAudioFile(podcastEpisode, scanLogger) {
    const MetadataMapArray = [
      {
        tag: 'tagComment',
        altTag: 'tagSubtitle',
        key: 'description'
      },
      {
        tag: 'tagSubtitle',
        key: 'subtitle'
      },
      {
        tag: 'tagDate',
        key: 'pubDate'
      },
      {
        tag: 'tagDisc',
        key: 'season',
      },
      {
        tag: 'tagTrack',
        altTag: 'tagSeriesPart',
        key: 'episode'
      },
      {
        tag: 'tagTitle',
        key: 'title'
      },
      {
        tag: 'tagEpisodeType',
        key: 'episodeType'
      }
    ]

    const audioFileMetaTags = podcastEpisode.audioFile.metaTags
    const overrideExistingDetails = Database.serverSettings.scannerPreferAudioMetadata
    MetadataMapArray.forEach((mapping) => {
      let value = audioFileMetaTags[mapping.tag]
      let tagToUse = mapping.tag
      if (!value && mapping.altTag) {
        tagToUse = mapping.altTag
        value = audioFileMetaTags[mapping.altTag]
      }

      if (value && typeof value === 'string') {
        value = value.trim() // Trim whitespace

        if (mapping.key === 'pubDate' && (!podcastEpisode.pubDate || overrideExistingDetails)) {
          const pubJsDate = new Date(value)
          if (pubJsDate && !isNaN(pubJsDate)) {
            podcastEpisode.publishedAt = pubJsDate.valueOf()
            podcastEpisode.pubDate = value
            scanLogger.addLog(LogLevel.DEBUG, `Mapping metadata to key ${tagToUse} => ${mapping.key}: ${podcastEpisode[mapping.key]}`)
          } else {
            scanLogger.addLog(LogLevel.WARN, `Mapping pubDate with tag ${tagToUse} has invalid date "${value}"`)
    return fsExtra.writeFile(metadataFilePath, JSON.stringify(jsonObject, null, 2)).then(async () => {
      // Add metadata.json to libraryFiles array if it is new
      let metadataLibraryFile = libraryItem.libraryFiles.find(lf => lf.metadata.path === filePathToPOSIX(metadataFilePath))
      if (storeMetadataWithItem) {
        if (!metadataLibraryFile) {
          const newLibraryFile = new LibraryFile()
          await newLibraryFile.setDataFromPath(metadataFilePath, `metadata.json`)
          metadataLibraryFile = newLibraryFile.toJSON()
          libraryItem.libraryFiles.push(metadataLibraryFile)
        } else {
          const fileTimestamps = await getFileTimestampsWithIno(metadataFilePath)
          if (fileTimestamps) {
            metadataLibraryFile.metadata.mtimeMs = fileTimestamps.mtimeMs
            metadataLibraryFile.metadata.ctimeMs = fileTimestamps.ctimeMs
            metadataLibraryFile.metadata.size = fileTimestamps.size
            metadataLibraryFile.ino = fileTimestamps.ino
          }
        } else if (mapping.key === 'episodeType' && (!podcastEpisode.episodeType || overrideExistingDetails)) {
          if (['full', 'trailer', 'bonus'].includes(value)) {
            podcastEpisode.episodeType = value
            scanLogger.addLog(LogLevel.DEBUG, `Mapping metadata to key ${tagToUse} => ${mapping.key}: ${podcastEpisode[mapping.key]}`)
          } else {
            scanLogger.addLog(LogLevel.WARN, `Mapping episodeType with invalid value "${value}". Must be one of [full, trailer, bonus].`)
          }
        } else if (!podcastEpisode[mapping.key] || overrideExistingDetails) {
          podcastEpisode[mapping.key] = value
          scanLogger.addLog(LogLevel.DEBUG, `Mapping metadata to key ${tagToUse} => ${mapping.key}: ${podcastEpisode[mapping.key]}`)
        }
        const libraryItemDirTimestamps = await getFileTimestampsWithIno(libraryItem.path)
        if (libraryItemDirTimestamps) {
          libraryItem.mtime = libraryItemDirTimestamps.mtimeMs
          libraryItem.ctime = libraryItemDirTimestamps.ctimeMs
          let size = 0
          libraryItem.libraryFiles.forEach((lf) => size += (!isNaN(lf.metadata.size) ? Number(lf.metadata.size) : 0))
          libraryItem.size = size
        }
      }

      libraryScan.addLog(LogLevel.DEBUG, `Success saving abmetadata to "${metadataFilePath}"`)

      return metadataLibraryFile
    }).catch((error) => {
      libraryScan.addLog(LogLevel.ERROR, `Failed to save json file at "${metadataFilePath}"`, error)
      return null
    })
  }
}
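
// For reference, the flat jsonObject variant above would serialize to a
// metadata.json file roughly like this (all values are invented for illustration):
const exampleMetadataJson = {
  tags: ['favorite'],
  title: 'Example Podcast',
  author: 'Jane Host',
  description: 'An example show.',
  releaseDate: '2023-01-01',
  genres: ['Technology'],
  feedURL: 'https://example.com/feed.xml',
  imageURL: 'https://example.com/cover.jpg',
  itunesPageURL: null,
  itunesId: null,
  itunesArtistId: null,
  asin: null,
  language: 'en',
  explicit: false,
  podcastType: 'episodic'
}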

@@ -12,6 +12,7 @@ const Author = require('../objects/entities/Author')
const Series = require('../objects/entities/Series')
const LibraryScanner = require('./LibraryScanner')
const CoverManager = require('../managers/CoverManager')
const TaskManager = require('../managers/TaskManager')

class Scanner {
  constructor() { }

@@ -36,7 +37,7 @@ class Scanner {
    var searchISBN = options.isbn || libraryItem.media.metadata.isbn
    var searchASIN = options.asin || libraryItem.media.metadata.asin

    var results = await BookFinder.search(provider, searchTitle, searchAuthor, searchISBN, searchASIN, { maxFuzzySearches: 2 })
    var results = await BookFinder.search(libraryItem, provider, searchTitle, searchAuthor, searchISBN, searchASIN, { maxFuzzySearches: 2 })
    if (!results.length) {
      return {
        warning: `No ${provider} match found`

@@ -280,6 +281,14 @@ class Scanner {
    return false
  }

  /**
   * Quick match library items
   *
   * @param {import('../objects/Library')} library
   * @param {import('../objects/LibraryItem')[]} libraryItems
   * @param {LibraryScan} libraryScan
   * @returns {Promise<boolean>} false if scan canceled
   */
  async matchLibraryItemsChunk(library, libraryItems, libraryScan) {
    for (let i = 0; i < libraryItems.length; i++) {
      const libraryItem = libraryItems[i]

@@ -313,6 +322,11 @@ class Scanner {
    return true
  }

  /**
   * Quick match all library items for library
   *
   * @param {import('../objects/Library')} library
   */
  async matchLibraryItems(library) {
    if (library.mediaType === 'podcast') {
      Logger.error(`[Scanner] matchLibraryItems: Match all not supported for podcasts yet`)

@@ -330,11 +344,14 @@ class Scanner {
    const libraryScan = new LibraryScan()
    libraryScan.setData(library, 'match')
    LibraryScanner.librariesScanning.push(libraryScan.getScanEmitData)
    SocketAuthority.emitter('scan_start', libraryScan.getScanEmitData)

    const taskData = {
      libraryId: library.id
    }
    const task = TaskManager.createAndAddTask('library-match-all', `Matching books in "${library.name}"`, null, true, taskData)
    Logger.info(`[Scanner] matchLibraryItems: Starting library match scan ${libraryScan.id} for ${libraryScan.libraryName}`)

    let hasMoreChunks = true
    let isCanceled = false
    while (hasMoreChunks) {
      const libraryItems = await Database.libraryItemModel.getLibraryItemsIncrement(offset, limit, { libraryId: library.id })
      if (!libraryItems.length) {

@@ -347,6 +364,7 @@ class Scanner {

      const shouldContinue = await this.matchLibraryItemsChunk(library, oldLibraryItems, libraryScan)
      if (!shouldContinue) {
        isCanceled = true
        break
      }
    }

@@ -354,13 +372,15 @@ class Scanner {
    if (offset === 0) {
      Logger.error(`[Scanner] matchLibraryItems: Library has no items ${library.id}`)
      libraryScan.setComplete('Library has no items')
      task.setFailed(libraryScan.error)
    } else {
      libraryScan.setComplete()
      task.setFinished(isCanceled ? 'Canceled' : libraryScan.scanResultsString)
    }

    delete LibraryScanner.cancelLibraryScan[libraryScan.libraryId]
    LibraryScanner.librariesScanning = LibraryScanner.librariesScanning.filter(ls => ls.id !== library.id)
    SocketAuthority.emitter('scan_complete', libraryScan.getScanEmitData)
    TaskManager.taskFinished(task)
  }
}
module.exports = new Scanner()
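
// Simplified sketch of the chunked match loop inside matchLibraryItems (the
// chunk size and the conversion to old library item objects are assumptions;
// only the control flow is taken from the diff above):
let offset = 0
const limit = 100 // assumed chunk size
let hasMoreChunks = true
let isCanceled = false
while (hasMoreChunks) {
  const libraryItems = await Database.libraryItemModel.getLibraryItemsIncrement(offset, limit, { libraryId: library.id })
  if (!libraryItems.length) {
    hasMoreChunks = false
    break
  }
  offset += libraryItems.length
  const oldLibraryItems = libraryItems.map(li => Database.libraryItemModel.getOldLibraryItem(li)) // assumed conversion
  const shouldContinue = await this.matchLibraryItemsChunk(library, oldLibraryItems, libraryScan)
  if (!shouldContinue) {
    isCanceled = true // user canceled the scan
    break
  }
}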

@@ -1,7 +1,9 @@
const fs = require('../libs/fsExtra')
const rra = require('../libs/recursiveReaddirAsync')
const axios = require('axios')
const Path = require('path')
const ssrfFilter = require('ssrf-req-filter')
const exec = require('child_process').exec
const fs = require('../libs/fsExtra')
const rra = require('../libs/recursiveReaddirAsync')
const Logger = require('../Logger')
const { AudioMimeType } = require('./constants')

@@ -18,22 +20,33 @@ const filePathToPOSIX = (path) => {
}
module.exports.filePathToPOSIX = filePathToPOSIX

async function getFileStat(path) {
/**
 * Check path is a child of or equal to another path
 *
 * @param {string} parentPath
 * @param {string} childPath
 * @returns {boolean}
 */
function isSameOrSubPath(parentPath, childPath) {
  parentPath = filePathToPOSIX(parentPath)
  childPath = filePathToPOSIX(childPath)
  if (parentPath === childPath) return true
  const relativePath = Path.relative(parentPath, childPath)
  return (
    relativePath === '' // Same path (e.g. parentPath = '/a/b/', childPath = '/a/b')
    || !relativePath.startsWith('..') && !Path.isAbsolute(relativePath) // Sub path
  )
}
module.exports.isSameOrSubPath = isSameOrSubPath

function getFileStat(path) {
  try {
    var stat = await fs.stat(path)
    return {
      size: stat.size,
      atime: stat.atime,
      mtime: stat.mtime,
      ctime: stat.ctime,
      birthtime: stat.birthtime
    }
    return fs.stat(path)
  } catch (err) {
    Logger.error('[fileUtils] Failed to stat', err)
    return false
    return null
  }
}
module.exports.getFileStat = getFileStat
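
// isSameOrSubPath behavior, for a few representative inputs:
isSameOrSubPath('/audiobooks', '/audiobooks')          // => true (same path)
isSameOrSubPath('/audiobooks', '/audiobooks/author')   // => true (child path)
isSameOrSubPath('/audiobooks', '/podcasts')            // => false (sibling)
isSameOrSubPath('/audiobooks', '/audiobooks/../other') // => false (escapes the parent)
// Windows-style separators are normalized first via filePathToPOSIX,
// so isSameOrSubPath('C:\\media', 'C:\\media\\book') is also true.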

async function getFileTimestampsWithIno(path) {
  try {
@@ -52,12 +65,30 @@ async function getFileTimestampsWithIno(path) {
}
module.exports.getFileTimestampsWithIno = getFileTimestampsWithIno

async function getFileSize(path) {
  var stat = await getFileStat(path)
  if (!stat) return 0
  return stat.size || 0
/**
 * Get file size
 *
 * @param {string} path
 * @returns {Promise<number>}
 */
module.exports.getFileSize = async (path) => {
  return (await getFileStat(path))?.size || 0
}

/**
 * Get file mtimeMs
 *
 * @param {string} path
 * @returns {Promise<number>} epoch timestamp
 */
module.exports.getFileMTimeMs = async (path) => {
  try {
    return (await getFileStat(path))?.mtimeMs || 0
  } catch (err) {
    Logger.error(`[fileUtils] Failed to getFileMtimeMs`, err)
    return 0
  }
}
module.exports.getFileSize = getFileSize

/**
 *
@@ -203,15 +234,32 @@ async function recurseFiles(path, relPathToReplace = null) {
}
module.exports.recurseFiles = recurseFiles

module.exports.downloadFile = (url, filepath) => {
/**
 * Download file from web to local file system
 * Uses SSRF filter to prevent internal URLs
 *
 * @param {string} url
 * @param {string} filepath path to download the file to
 * @param {Function} [contentTypeFilter] validate content type before writing
 * @returns {Promise}
 */
module.exports.downloadFile = (url, filepath, contentTypeFilter = null) => {
  return new Promise(async (resolve, reject) => {
    Logger.debug(`[fileUtils] Downloading file to ${filepath}`)
    axios({
      url,
      method: 'GET',
      responseType: 'stream',
      timeout: 30000
      timeout: 30000,
      httpAgent: ssrfFilter(url),
      httpsAgent: ssrfFilter(url)
    }).then((response) => {
      // Validate content type
      if (contentTypeFilter && !contentTypeFilter?.(response.headers?.['content-type'])) {
        return reject(new Error(`Invalid content type "${response.headers?.['content-type'] || ''}"`))
      }

      // Write to filepath
      const writer = fs.createWriteStream(filepath)
      response.data.pipe(writer)

@@ -224,6 +272,21 @@ module.exports.downloadFile = (url, filepath) => {
  })
}

/**
 * Download image file from web to local file system
 * Response header must have content-type of image/ (excluding svg)
 *
 * @param {string} url
 * @param {string} filepath
 * @returns {Promise}
 */
module.exports.downloadImageFile = (url, filepath) => {
  const contentTypeFilter = (contentType) => {
    return contentType?.startsWith('image/') && contentType !== 'image/svg+xml'
  }
  return this.downloadFile(url, filepath, contentTypeFilter)
}
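
// Usage sketch for the hardened download helpers (URL and destination path are
// examples only):
const fileUtils = require('./server/utils/fileUtils')

// Rejects if the response content-type is not image/* (or is an SVG), and the
// ssrf-req-filter agents block requests that resolve to internal hosts.
fileUtils.downloadImageFile('https://example.com/cover.jpg', '/metadata/items/abc/cover.jpg')
  .then(() => console.log('cover saved'))
  .catch((error) => console.error('download rejected', error))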

module.exports.sanitizeFilename = (filename, colonReplacement = ' - ') => {
  if (typeof filename !== 'string') {
    return false
@@ -251,6 +314,7 @@ module.exports.sanitizeFilename = (filename, colonReplacement = ' - ') => {
    .replace(lineBreaks, replacement)
    .replace(windowsReservedRe, replacement)
    .replace(windowsTrailingRe, replacement)
    .replace(/\s+/g, ' ') // Replace consecutive spaces with a single space

  // Check if basename is too many bytes
  const ext = Path.extname(sanitized) // separate out file extension

@@ -296,3 +360,84 @@ module.exports.encodeUriPath = (path) => {
  const uri = new URL(path, "file://")
  return uri.pathname
}

/**
 * Check if directory is writable.
 * This method is necessary because fs.access(directory, fs.constants.W_OK) does not work on Windows
 *
 * @param {string} directory
 * @returns {boolean}
 */
module.exports.isWritable = async (directory) => {
  try {
    const accessTestFile = path.join(directory, 'accessTest')
    await fs.writeFile(accessTestFile, '')
    await fs.remove(accessTestFile)
    return true
  } catch (err) {
    return false
  }
}

/**
 * Get Windows drives as array e.g. ["C:/", "F:/"]
 *
 * @returns {Promise<string[]>}
 */
module.exports.getWindowsDrives = async () => {
  if (!global.isWin) {
    return []
  }
  return new Promise((resolve, reject) => {
    exec('wmic logicaldisk get name', async (error, stdout, stderr) => {
      if (error) {
        reject(error)
        return
      }
      let drives = stdout?.split(/\r?\n/).map(line => line.trim()).filter(line => line).slice(1)
      const validDrives = []
      for (const drive of drives) {
        let drivepath = drive + '/'
        if (await fs.pathExists(drivepath)) {
          validDrives.push(drivepath)
        } else {
          Logger.error(`Invalid drive ${drivepath}`)
        }
      }
      resolve(validDrives)
    })
  })
}

/**
 * Get array of directory paths in a directory
 *
 * @param {string} dirPath
 * @param {number} level
 * @returns {Promise<{ path:string, dirname:string, level:number }[]>}
 */
module.exports.getDirectoriesInPath = async (dirPath, level) => {
  try {
    const paths = await fs.readdir(dirPath)
    let dirs = await Promise.all(paths.map(async dirname => {
      const fullPath = Path.join(dirPath, dirname)

      const lstat = await fs.lstat(fullPath).catch((error) => {
        Logger.debug(`Failed to lstat "${fullPath}"`, error)
        return null
      })
      if (!lstat?.isDirectory()) return null

      return {
        path: this.filePathToPOSIX(fullPath),
        dirname,
        level
      }
    }))
    dirs = dirs.filter(d => d)
    return dirs
  } catch (error) {
    Logger.error('Failed to readdir', dirPath, error)
    return []
  }
}

@@ -1,461 +1,26 @@
const fs = require('../../libs/fsExtra')
const package = require('../../../package.json')
const Logger = require('../../Logger')
const { getId } = require('../index')
const areEquivalent = require('../areEquivalent')


const CurrentAbMetadataVersion = 2
// abmetadata v1 key map
// const bookKeyMap = {
//   title: 'title',
//   subtitle: 'subtitle',
//   author: 'authorFL',
//   narrator: 'narratorFL',
//   publishedYear: 'publishedYear',
//   publisher: 'publisher',
//   description: 'description',
//   isbn: 'isbn',
//   asin: 'asin',
//   language: 'language',
//   genres: 'genresCommaSeparated'
// }

const commaSeparatedToArray = (v) => {
  if (!v) return []
  return [...new Set(v.split(',').map(_v => _v.trim()).filter(_v => _v))]
}

const podcastMetadataMapper = {
  title: {
    to: (m) => m.title || '',
    from: (v) => v || ''
  },
  author: {
    to: (m) => m.author || '',
    from: (v) => v || null
  },
  language: {
    to: (m) => m.language || '',
    from: (v) => v || null
  },
  genres: {
    to: (m) => m.genres?.join(', ') || '',
    from: (v) => commaSeparatedToArray(v)
  },
  feedUrl: {
    to: (m) => m.feedUrl || '',
    from: (v) => v || null
  },
  itunesId: {
    to: (m) => m.itunesId || '',
    from: (v) => v || null
  },
  explicit: {
    to: (m) => m.explicit ? 'Y' : 'N',
    from: (v) => v && v.toLowerCase() == 'y'
  }
}

const bookMetadataMapper = {
  title: {
    to: (m) => m.title || '',
    from: (v) => v || ''
  },
  subtitle: {
    to: (m) => m.subtitle || '',
    from: (v) => v || null
  },
  authors: {
    to: (m) => {
      if (m.authorName !== undefined) return m.authorName
      if (!m.authors?.length) return ''
      return m.authors.map(au => au.name).join(', ')
    },
    from: (v) => commaSeparatedToArray(v)
  },
  narrators: {
    to: (m) => m.narrators?.join(', ') || '',
    from: (v) => commaSeparatedToArray(v)
  },
  publishedYear: {
    to: (m) => m.publishedYear || '',
    from: (v) => v || null
  },
  publisher: {
    to: (m) => m.publisher || '',
    from: (v) => v || null
  },
  isbn: {
    to: (m) => m.isbn || '',
    from: (v) => v || null
  },
  asin: {
    to: (m) => m.asin || '',
    from: (v) => v || null
  },
  language: {
    to: (m) => m.language || '',
    from: (v) => v || null
  },
  genres: {
    to: (m) => m.genres?.join(', ') || '',
    from: (v) => commaSeparatedToArray(v)
  },
  series: {
    to: (m) => {
      if (m.seriesName !== undefined) return m.seriesName
      if (!m.series?.length) return ''
      return m.series.map((se) => {
        const sequence = se.bookSeries?.sequence || ''
        if (!sequence) return se.name
        return `${se.name} #${sequence}`
      }).join(', ')
    },
    from: (v) => {
      return commaSeparatedToArray(v).map(series => { // Return array of { name, sequence }
        let sequence = null
        let name = series
        // Series sequence match any characters after " #" other than whitespace and another #
        // e.g. "Name #1a" is valid. "Name #1#a" or "Name #1 a" is not valid.
        const matchResults = series.match(/ #([^#\s]+)$/) // Pull out sequence #
        if (matchResults && matchResults.length && matchResults.length > 1) {
          sequence = matchResults[1] // Group 1
          name = series.replace(matchResults[0], '')
        }
        return {
          name,
          sequence
        }
      })
    }
  },
  explicit: {
    to: (m) => m.explicit ? 'Y' : 'N',
    from: (v) => v && v.toLowerCase() == 'y'
  },
  abridged: {
    to: (m) => m.abridged ? 'Y' : 'N',
    from: (v) => v && v.toLowerCase() == 'y'
  }
}

const metadataMappers = {
  book: bookMetadataMapper,
  podcast: podcastMetadataMapper
}

function generate(libraryItem, outputPath) {
  let fileString = `;ABMETADATA${CurrentAbMetadataVersion}\n`
  fileString += `#audiobookshelf v${package.version}\n\n`

  const mediaType = libraryItem.mediaType

  fileString += `media=${mediaType}\n`
  fileString += `tags=${JSON.stringify(libraryItem.media.tags)}\n`

  const metadataMapper = metadataMappers[mediaType]
  var mediaMetadata = libraryItem.media.metadata
  for (const key in metadataMapper) {
    fileString += `${key}=${metadataMapper[key].to(mediaMetadata)}\n`
  }

  // Description block
  if (mediaMetadata.description) {
    fileString += '\n[DESCRIPTION]\n'
    fileString += mediaMetadata.description + '\n'
  }

  // Book chapters
  if (libraryItem.mediaType == 'book' && libraryItem.media.chapters.length) {
    fileString += '\n'
    libraryItem.media.chapters.forEach((chapter) => {
      fileString += `[CHAPTER]\n`
      fileString += `start=${chapter.start}\n`
      fileString += `end=${chapter.end}\n`
      fileString += `title=${chapter.title}\n`
    })
  }
  return fs.writeFile(outputPath, fileString).then(() => true).catch((error) => {
    Logger.error(`[absMetaFileGenerator] Failed to save abs file`, error)
    return false
  })
}
module.exports.generate = generate
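
// For orientation, a metadata.abs file emitted by generate() looks roughly like
// this - version header, key=value detail lines, then optional sections. All
// values below are invented for illustration:
//
//   ;ABMETADATA2
//   #audiobookshelf v2.x.x
//
//   media=book
//   tags=["favorite"]
//   title=Example Book
//   authors=Jane Author
//   series=Example Series #1
//   explicit=N
//
//   [DESCRIPTION]
//   An example description.
//
//   [CHAPTER]
//   start=0
//   end=1500.5
//   title=Chapter 1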

function generateFromNewModel(libraryItem, outputPath) {
  let fileString = `;ABMETADATA${CurrentAbMetadataVersion}\n`
  fileString += `#audiobookshelf v${package.version}\n\n`

  const mediaType = libraryItem.mediaType

  fileString += `media=${mediaType}\n`
  fileString += `tags=${JSON.stringify(libraryItem.media.tags || '')}\n`

  const metadataMapper = metadataMappers[mediaType]
  for (const key in metadataMapper) {
    fileString += `${key}=${metadataMapper[key].to(libraryItem.media)}\n`
  }

  // Description block
  if (libraryItem.media.description) {
    fileString += '\n[DESCRIPTION]\n'
    fileString += libraryItem.media.description + '\n'
  }

  // Book chapters
  if (mediaType == 'book' && libraryItem.media.chapters?.length) {
    fileString += '\n'
    libraryItem.media.chapters.forEach((chapter) => {
      fileString += `[CHAPTER]\n`
      fileString += `start=${chapter.start}\n`
      fileString += `end=${chapter.end}\n`
      fileString += `title=${chapter.title}\n`
    })
  }
  return fs.writeFile(outputPath, fileString).then(() => true).catch((error) => {
    Logger.error(`[absMetaFileGenerator] Failed to save abs file`, error)
    return false
  })
}
module.exports.generateFromNewModel = generateFromNewModel

function parseSections(lines) {
  if (!lines || !lines.length || !lines[0].startsWith('[')) { // First line must be section start
    return []
  }

  var sections = []
  var currentSection = []
  lines.forEach(line => {
    if (!line || !line.trim()) return

    if (line.startsWith('[') && currentSection.length) { // current section ended
      sections.push(currentSection)
      currentSection = []
    }

    currentSection.push(line)
  })
  if (currentSection.length) sections.push(currentSection)
  return sections
}

// lines inside chapter section
function parseChapterLines(lines) {
  var chapter = {
    start: null,
    end: null,
    title: null
  }

  lines.forEach((line) => {
    var keyValue = line.split('=')
    if (keyValue.length > 1) {
      var key = keyValue[0].trim()
      var value = keyValue[1].trim()

      if (key === 'start' || key === 'end') {
        if (!isNaN(value)) {
          chapter[key] = Number(value)
        } else {
          Logger.warn(`[abmetadataGenerator] Invalid chapter value for ${key}: ${value}`)
        }
      } else if (key === 'title') {
        chapter[key] = value
      }
    }
  })

  if (chapter.start === null || chapter.end === null || chapter.end < chapter.start) {
    Logger.warn(`[abmetadataGenerator] Invalid chapter`)
    return null
  }
  return chapter
}

function parseTags(value) {
  if (!value) return null
  try {
    const parsedTags = []
    JSON.parse(value).forEach((loadedTag) => {
      if (loadedTag.trim()) parsedTags.push(loadedTag) // Only push tags that are non-empty
    })
    return parsedTags
  } catch (err) {
    Logger.error(`[abmetadataGenerator] Error parsing TAGS "${value}":`, err.message)
    return null
  }
}

function parseAbMetadataText(text, mediaType) {
  if (!text) return null
  let lines = text.split(/\r?\n/)

  // Check first line and get abmetadata version number
  const firstLine = lines.shift().toLowerCase()
  if (!firstLine.startsWith(';abmetadata')) {
    Logger.error(`Invalid abmetadata file first line is not ;abmetadata "${firstLine}"`)
    return null
  }
  const abmetadataVersion = Number(firstLine.replace(';abmetadata', '').trim())
  if (isNaN(abmetadataVersion) || abmetadataVersion != CurrentAbMetadataVersion) {
    Logger.warn(`Invalid abmetadata version ${abmetadataVersion} - must use version ${CurrentAbMetadataVersion}`)
    return null
  }

  // Remove comments and empty lines
  const ignoreFirstChars = [' ', '#', ';'] // Ignore any line starting with the following
  lines = lines.filter(line => !!line.trim() && !ignoreFirstChars.includes(line[0]))

  // Get lines that map to book details (all lines before the first chapter or description section)
  const firstSectionLine = lines.findIndex(l => l.startsWith('['))
  const detailLines = firstSectionLine > 0 ? lines.slice(0, firstSectionLine) : lines
  const remainingLines = firstSectionLine > 0 ? lines.slice(firstSectionLine) : []

  if (!detailLines.length) {
    Logger.error(`Invalid abmetadata file no detail lines`)
    return null
  }

  // Check the media type saved for this abmetadata file show warning if not matching expected
  if (detailLines[0].toLowerCase().startsWith('media=')) {
    const mediaLine = detailLines.shift() // Remove media line
    const abMediaType = mediaLine.toLowerCase().split('=')[1].trim()
    if (abMediaType != mediaType) {
      Logger.warn(`Invalid media type in abmetadata file ${abMediaType} expecting ${mediaType}`)
    }
  } else {
    Logger.warn(`No media type found in abmetadata file - expecting ${mediaType}`)
  }

  const metadataMapper = metadataMappers[mediaType]
  // Put valid book detail values into map
  const mediaDetails = {
    metadata: {},
    chapters: [],
    tags: null // When tags are null it will not be used
  }

  for (let i = 0; i < detailLines.length; i++) {
    const line = detailLines[i]
    const keyValue = line.split('=')
    if (keyValue.length < 2) {
      Logger.warn('abmetadata invalid line has no =', line)
    } else if (keyValue[0].trim() === 'tags') { // Parse tags
      const value = keyValue.slice(1).join('=').trim() // Everything after "tags="
      mediaDetails.tags = parseTags(value)
    } else if (!metadataMapper[keyValue[0].trim()]) { // Ensure valid media metadata key
      Logger.warn(`abmetadata key "${keyValue[0].trim()}" is not a valid ${mediaType} metadata key`)
    } else {
      const key = keyValue.shift().trim()
      const value = keyValue.join('=').trim()
      mediaDetails.metadata[key] = metadataMapper[key].from(value)
    }
  }

  // Parse sections for description and chapters
  const sections = parseSections(remainingLines)
  sections.forEach((section) => {
    const sectionHeader = section.shift()
    if (sectionHeader.toLowerCase().startsWith('[description]')) {
      mediaDetails.metadata.description = section.join('\n')
    } else if (sectionHeader.toLowerCase().startsWith('[chapter]')) {
      const chapter = parseChapterLines(section)
      if (chapter) {
        mediaDetails.chapters.push(chapter)
      }
    }
  })

  mediaDetails.chapters.sort((a, b) => a.start - b.start)

  if (mediaDetails.chapters.length) {
    mediaDetails.chapters = cleanChaptersArray(mediaDetails.chapters, mediaDetails.metadata.title) || []
  }

  return mediaDetails
}
module.exports.parse = parseAbMetadataText
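
// The series 'from' mapper above splits "Name #sequence" strings with the
// regex / #([^#\s]+)$/. A few examples of how that resolves (inputs illustrative):
'Stormlight Archive #2'.match(/ #([^#\s]+)$/) // => [' #2', '2'] -> { name: 'Stormlight Archive', sequence: '2' }
'Dark Tower #1a'.match(/ #([^#\s]+)$/)        // => [' #1a', '1a'] (non-numeric sequences are allowed)
'Standalone Novel'.match(/ #([^#\s]+)$/)      // => null -> { name: 'Standalone Novel', sequence: null }
'Bad #1 a'.match(/ #([^#\s]+)$/)              // => null (space after the sequence), name kept as-is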

function checkUpdatedBookAuthors(abmetadataAuthors, authors) {
  const finalAuthors = []
  let hasUpdates = false

  abmetadataAuthors.forEach((authorName) => {
    const findAuthor = authors.find(au => au.name.toLowerCase() == authorName.toLowerCase())
    if (!findAuthor) {
      hasUpdates = true
      finalAuthors.push({
        id: getId('new'), // New author gets created in Scanner.js after library scan
        name: authorName
      })
    } else {
      finalAuthors.push(findAuthor)
    }
  })

  var authorsRemoved = authors.filter(au => !abmetadataAuthors.some(auname => auname.toLowerCase() == au.name.toLowerCase()))
  if (authorsRemoved.length) {
    hasUpdates = true
  }

  return {
    authors: finalAuthors,
    hasUpdates
  }
}

function checkUpdatedBookSeries(abmetadataSeries, series) {
  var finalSeries = []
  var hasUpdates = false

  abmetadataSeries.forEach((seriesObj) => {
    var findSeries = series.find(se => se.name.toLowerCase() == seriesObj.name.toLowerCase())
    if (!findSeries) {
      hasUpdates = true
      finalSeries.push({
        id: getId('new'), // New series gets created in Scanner.js after library scan
        name: seriesObj.name,
        sequence: seriesObj.sequence
      })
    } else if (findSeries.sequence != seriesObj.sequence) { // Sequence was updated
      hasUpdates = true
      finalSeries.push({
        id: findSeries.id,
        name: findSeries.name,
        sequence: seriesObj.sequence
      })
    } else {
      finalSeries.push(findSeries)
    }
  })

  var seriesRemoved = series.filter(se => !abmetadataSeries.some(_se => _se.name.toLowerCase() == se.name.toLowerCase()))
  if (seriesRemoved.length) {
    hasUpdates = true
  }

  return {
    series: finalSeries,
    hasUpdates
  }
}

function checkArraysChanged(abmetadataArray, mediaArray) {
  if (!Array.isArray(abmetadataArray)) return false
  if (!Array.isArray(mediaArray)) return true
  return abmetadataArray.join(',') != mediaArray.join(',')
}

function parseJsonMetadataText(text) {
  try {
    const abmetadataData = JSON.parse(text)
    if (!abmetadataData.metadata) abmetadataData.metadata = {}

    if (abmetadataData.metadata.series?.length) {
      abmetadataData.metadata.series = [...new Set(abmetadataData.metadata.series.map(t => t?.trim()).filter(t => t))]
      abmetadataData.metadata.series = abmetadataData.metadata.series.map(series => {
    // Old metadata.json used nested "metadata"
    if (abmetadataData.metadata) {
      for (const key in abmetadataData.metadata) {
        if (abmetadataData.metadata[key] === undefined) continue
        let newModelKey = key
        if (key === 'feedUrl') newModelKey = 'feedURL'
        else if (key === 'imageUrl') newModelKey = 'imageURL'
        else if (key === 'itunesPageUrl') newModelKey = 'itunesPageURL'
        else if (key === 'type') newModelKey = 'podcastType'
        abmetadataData[newModelKey] = abmetadataData.metadata[key]
      }
    }
    delete abmetadataData.metadata

    if (abmetadataData.series?.length) {
      abmetadataData.series = [...new Set(abmetadataData.series.map(t => t?.trim()).filter(t => t))]
      abmetadataData.series = abmetadataData.series.map(series => {
        let sequence = null
        let name = series
        // Series sequence match any characters after " #" other than whitespace and another #

@@ -476,17 +41,17 @@ function parseJsonMetadataText(text) {
      abmetadataData.tags = [...new Set(abmetadataData.tags.map(t => t?.trim()).filter(t => t))]
    }
    if (abmetadataData.chapters?.length) {
      abmetadataData.chapters = cleanChaptersArray(abmetadataData.chapters, abmetadataData.metadata.title)
      abmetadataData.chapters = cleanChaptersArray(abmetadataData.chapters, abmetadataData.title)
    }
    // clean remove dupes
    if (abmetadataData.metadata.authors?.length) {
      abmetadataData.metadata.authors = [...new Set(abmetadataData.metadata.authors.map(t => t?.trim()).filter(t => t))]
    if (abmetadataData.authors?.length) {
      abmetadataData.authors = [...new Set(abmetadataData.authors.map(t => t?.trim()).filter(t => t))]
    }
    if (abmetadataData.metadata.narrators?.length) {
      abmetadataData.metadata.narrators = [...new Set(abmetadataData.metadata.narrators.map(t => t?.trim()).filter(t => t))]
    if (abmetadataData.narrators?.length) {
      abmetadataData.narrators = [...new Set(abmetadataData.narrators.map(t => t?.trim()).filter(t => t))]
    }
    if (abmetadataData.metadata.genres?.length) {
      abmetadataData.metadata.genres = [...new Set(abmetadataData.metadata.genres.map(t => t?.trim()).filter(t => t))]
    if (abmetadataData.genres?.length) {
      abmetadataData.genres = [...new Set(abmetadataData.genres.map(t => t?.trim()).filter(t => t))]
    }
    return abmetadataData
  } catch (error) {

@@ -522,73 +87,3 @@ function cleanChaptersArray(chaptersArray, mediaTitle) {
  }
  return chapters
}

// Input text from abmetadata file and return object of media changes
// only returns object of changes. empty object means no changes
function parseAndCheckForUpdates(text, media, mediaType, isJSON) {
  if (!text || !media || !media.metadata || !mediaType) {
    Logger.error(`Invalid inputs to parseAndCheckForUpdates`)
    return null
  }

  const mediaMetadata = media.metadata
  const metadataUpdatePayload = {} // Only updated key/values

  let abmetadataData = null

  if (isJSON) {
    abmetadataData = parseJsonMetadataText(text)
  } else {
    abmetadataData = parseAbMetadataText(text, mediaType)
  }

  if (!abmetadataData || !abmetadataData.metadata) {
    Logger.error(`[abmetadataGenerator] Invalid metadata file`)
    return null
  }

  const abMetadata = abmetadataData.metadata // Metadata from abmetadata file
  for (const key in abMetadata) {
    if (mediaMetadata[key] !== undefined) {
      if (key === 'authors') {
        const authorUpdatePayload = checkUpdatedBookAuthors(abMetadata[key], mediaMetadata[key])
        if (authorUpdatePayload.hasUpdates) metadataUpdatePayload.authors = authorUpdatePayload.authors
      } else if (key === 'series') {
        const seriesUpdatePayload = checkUpdatedBookSeries(abMetadata[key], mediaMetadata[key])
        if (seriesUpdatePayload.hasUpdates) metadataUpdatePayload.series = seriesUpdatePayload.series
      } else if (key === 'genres' || key === 'narrators') { // Compare array differences
        if (checkArraysChanged(abMetadata[key], mediaMetadata[key])) {
          metadataUpdatePayload[key] = abMetadata[key]
        }
      } else if (abMetadata[key] !== mediaMetadata[key]) {
        metadataUpdatePayload[key] = abMetadata[key]
      }
    } else {
      Logger.warn('[abmetadataGenerator] Invalid key', key)
    }
  }

  const updatePayload = {} // Only updated key/values
  // Check update tags
  if (abmetadataData.tags) {
    if (checkArraysChanged(abmetadataData.tags, media.tags)) {
      updatePayload.tags = abmetadataData.tags
    }
  }

  if (abmetadataData.chapters && mediaType === 'book') {
    const abmetadataChaptersCleaned = cleanChaptersArray(abmetadataData.chapters)
    if (abmetadataChaptersCleaned) {
      if (!areEquivalent(abmetadataChaptersCleaned, media.chapters)) {
        updatePayload.chapters = abmetadataChaptersCleaned
      }
    }
  }

  if (Object.keys(metadataUpdatePayload).length) {
    updatePayload.metadata = metadataUpdatePayload
  }

  return updatePayload
}
module.exports.parseAndCheckForUpdates = parseAndCheckForUpdates
@@ -1,4 +1,5 @@
 const xml = require('../../libs/xml')
+const escapeForXML = require('../../libs/xml/escapeForXML')

 /**
  * Generate OPML file string for podcasts in a library
@@ -12,18 +13,18 @@ module.exports.generate = (podcasts, indent = true) => {
     if (!podcast.feedURL) return
     const feedAttributes = {
       type: 'rss',
-      text: podcast.title,
-      title: podcast.title,
-      xmlUrl: podcast.feedURL
+      text: escapeForXML(podcast.title),
+      title: escapeForXML(podcast.title),
+      xmlUrl: escapeForXML(podcast.feedURL)
     }
     if (podcast.description) {
-      feedAttributes.description = podcast.description
+      feedAttributes.description = escapeForXML(podcast.description)
     }
     if (podcast.itunesPageUrl) {
-      feedAttributes.htmlUrl = podcast.itunesPageUrl
+      feedAttributes.htmlUrl = escapeForXML(podcast.itunesPageUrl)
     }
     if (podcast.language) {
-      feedAttributes.language = podcast.language
+      feedAttributes.language = escapeForXML(podcast.language)
     }
     bodyItems.push({
       outline: {
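Every feed-sourced field above now passes through escapeForXML before landing in the OPML outline attributes, so a title or URL containing XML metacharacters can no longer produce a malformed document. A rough sketch of the behavior (the exact output depends on the bundled libs/xml implementation, which is assumed here to escape '&' and '<'):

escapeForXML('Tom & Jerry <Live>') // -> 'Tom &amp; Jerry &lt;Live>'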
@@ -1,4 +1,5 @@
 const Path = require('path')
+const uuid = require('uuid')
 const Logger = require('../Logger')
 const { parseString } = require("xml2js")
 const areEquivalent = require('./areEquivalent')
@@ -11,24 +12,24 @@ const levenshteinDistance = (str1, str2, caseSensitive = false) => {
     str2 = str2.toLowerCase()
   }
   const track = Array(str2.length + 1).fill(null).map(() =>
-    Array(str1.length + 1).fill(null));
+    Array(str1.length + 1).fill(null))
   for (let i = 0; i <= str1.length; i += 1) {
-    track[0][i] = i;
+    track[0][i] = i
   }
   for (let j = 0; j <= str2.length; j += 1) {
-    track[j][0] = j;
+    track[j][0] = j
   }
   for (let j = 1; j <= str2.length; j += 1) {
     for (let i = 1; i <= str1.length; i += 1) {
-      const indicator = str1[i - 1] === str2[j - 1] ? 0 : 1;
+      const indicator = str1[i - 1] === str2[j - 1] ? 0 : 1
       track[j][i] = Math.min(
         track[j][i - 1] + 1, // deletion
         track[j - 1][i] + 1, // insertion
         track[j - 1][i - 1] + indicator, // substitution
-      );
+      )
     }
   }
-  return track[str2.length][str1.length];
+  return track[str2.length][str1.length]
 }
 module.exports.levenshteinDistance = levenshteinDistance
@@ -65,6 +66,9 @@ module.exports.getId = (prepend = '') => {
 }

 function elapsedPretty(seconds) {
+  if (seconds > 0 && seconds < 1) {
+    return `${Math.floor(seconds * 1000)} ms`
+  }
   if (seconds < 60) {
     return `${Math.floor(seconds)} sec`
   }
@@ -166,4 +170,66 @@ module.exports.getTitleIgnorePrefix = (title) => {
 module.exports.getTitlePrefixAtEnd = (title) => {
   let [sort, prefix] = getTitleParts(title)
   return prefix ? `${sort}, ${prefix}` : title
 }
+
+/**
+ * to lower case for only ascii characters
+ * used to handle sqlite that doesnt support unicode lower
+ * @see https://github.com/advplyr/audiobookshelf/issues/2187
+ *
+ * @param {string} str
+ * @returns {string}
+ */
+module.exports.asciiOnlyToLowerCase = (str) => {
+  if (!str) return ''
+
+  let temp = ''
+  for (let chars of str) {
+    let value = chars.charCodeAt()
+    if (value >= 65 && value <= 90) {
+      temp += String.fromCharCode(value + 32)
+    } else {
+      temp += chars
+    }
+  }
+  return temp
+}
+
+/**
+ * Escape string used in RegExp
+ * @see https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions#escaping
+ *
+ * @param {string} str
+ * @returns {string}
+ */
+module.exports.escapeRegExp = (str) => {
+  if (typeof str !== 'string') return ''
+  return str.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')
+}
+
+/**
+ * Validate url string with URL class
+ *
+ * @param {string} rawUrl
+ * @returns {string} null if invalid
+ */
+module.exports.validateUrl = (rawUrl) => {
+  if (!rawUrl || typeof rawUrl !== 'string') return null
+  try {
+    return new URL(rawUrl).toString()
+  } catch (error) {
+    Logger.error(`Invalid URL "${rawUrl}"`, error)
+    return null
+  }
+}
+
+/**
+ * Check if a string is a valid UUID
+ *
+ * @param {string} str
+ * @returns {boolean}
+ */
+module.exports.isUUID = (str) => {
+  if (!str || typeof str !== 'string') return false
+  return uuid.validate(str)
+}
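These helpers are leaned on elsewhere in this diff; in particular, the search queries further down use asciiOnlyToLowerCase to stay consistent with SQLite's ASCII-only LOWER(). A quick sketch with illustrative inputs (require path shown from the repo root for illustration):

const { asciiOnlyToLowerCase, escapeRegExp, validateUrl, isUUID } = require('./server/utils')
asciiOnlyToLowerCase('Größe ABC')  // -> 'größe abc' (ASCII letters folded, 'ö' and 'ß' untouched)
escapeRegExp('1.5 (draft)')        // -> '1\.5 \(draft\)', safe to embed in new RegExp(...)
validateUrl('https://example.com') // -> 'https://example.com/' (normalized); null if unparsable
isUUID('not-a-uuid')               // -> false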
server/utils/migrations/absMetadataMigration.js (new file, 93 lines)
@@ -0,0 +1,93 @@
+const Path = require('path')
+const Logger = require('../../Logger')
+const fsExtra = require('../../libs/fsExtra')
+const fileUtils = require('../fileUtils')
+const LibraryFile = require('../../objects/files/LibraryFile')
+
+/**
+ *
+ * @param {import('../../models/LibraryItem')} libraryItem
+ * @returns {Promise<boolean>} false if failed
+ */
+async function writeMetadataFileForItem(libraryItem) {
+  const storeMetadataWithItem = global.ServerSettings.storeMetadataWithItem && !libraryItem.isFile
+  const metadataPath = storeMetadataWithItem ? libraryItem.path : Path.join(global.MetadataPath, 'items', libraryItem.id)
+  const metadataFilepath = fileUtils.filePathToPOSIX(Path.join(metadataPath, 'metadata.json'))
+  if ((await fsExtra.pathExists(metadataFilepath))) {
+    // Metadata file already exists do nothing
+    return null
+  }
+  Logger.info(`[absMetadataMigration] metadata file not found at "${metadataFilepath}" - creating`)
+
+  if (!storeMetadataWithItem) {
+    // Ensure /metadata/items/<lid> dir
+    await fsExtra.ensureDir(metadataPath)
+  }
+
+  const metadataJson = libraryItem.media.getAbsMetadataJson()
+
+  // Save to file
+  const success = await fsExtra.writeFile(metadataFilepath, JSON.stringify(metadataJson, null, 2)).then(() => true).catch((error) => {
+    Logger.error(`[absMetadataMigration] failed to save metadata file at "${metadataFilepath}"`, error.message || error)
+    return false
+  })
+
+  if (!success) return false
+  if (!storeMetadataWithItem) return true // No need to do anything else
+
+  // Safety check to make sure library file with the same path isnt already there
+  libraryItem.libraryFiles = libraryItem.libraryFiles.filter(lf => lf.metadata.path !== metadataFilepath)
+
+  // Put new library file in library item
+  const newLibraryFile = new LibraryFile()
+  await newLibraryFile.setDataFromPath(metadataFilepath, 'metadata.json')
+  libraryItem.libraryFiles.push(newLibraryFile.toJSON())
+
+  // Update library item timestamps and total size
+  const libraryItemDirTimestamps = await fileUtils.getFileTimestampsWithIno(libraryItem.path)
+  if (libraryItemDirTimestamps) {
+    libraryItem.mtime = libraryItemDirTimestamps.mtimeMs
+    libraryItem.ctime = libraryItemDirTimestamps.ctimeMs
+    let size = 0
+    libraryItem.libraryFiles.forEach((lf) => size += (!isNaN(lf.metadata.size) ? Number(lf.metadata.size) : 0))
+    libraryItem.size = size
+  }
+
+  libraryItem.changed('libraryFiles', true)
+  return libraryItem.save().then(() => true).catch((error) => {
+    Logger.error(`[absMetadataMigration] failed to save libraryItem "${libraryItem.id}"`, error.message || error)
+    return false
+  })
+}
+
+/**
+ *
+ * @param {import('../../Database')} Database
+ * @param {number} [offset=0]
+ * @param {number} [totalCreated=0]
+ */
+async function runMigration(Database, offset = 0, totalCreated = 0) {
+  const libraryItems = await Database.libraryItemModel.getLibraryItemsIncrement(offset, 500, { isMissing: false })
+  if (!libraryItems.length) return totalCreated
+
+  let numCreated = 0
+  for (const libraryItem of libraryItems) {
+    const success = await writeMetadataFileForItem(libraryItem)
+    if (success) numCreated++
+  }
+
+  if (libraryItems.length < 500) {
+    return totalCreated + numCreated
+  }
+  return runMigration(Database, offset + libraryItems.length, totalCreated + numCreated)
+}
+
+/**
+ *
+ * @param {import('../../Database')} Database
+ */
+module.exports.migrate = async (Database) => {
+  Logger.info(`[absMetadataMigration] Starting metadata.json migration`)
+  const totalCreated = await runMigration(Database)
+  Logger.info(`[absMetadataMigration] Finished metadata.json migration (${totalCreated} files created)`)
+}
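The migration is idempotent per item (it returns early when metadata.json already exists) and pages through library items in batches of 500 via tail recursion. The call site is not part of this excerpt; wiring it into server startup would look roughly like this (placement assumed for illustration):

const absMetadataMigration = require('./utils/migrations/absMetadataMigration')
await absMetadataMigration.migrate(Database) // logs how many metadata.json files were created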
server/utils/parsers/parseEbookMetadata.js (new file, 42 lines)
@@ -0,0 +1,42 @@
+const parseEpubMetadata = require('./parseEpubMetadata')
+
+/**
+ * @typedef EBookFileScanData
+ * @property {string} path
+ * @property {string} ebookFormat
+ * @property {string} ebookCoverPath internal image path
+ * @property {import('../../scanner/BookScanner').BookMetadataObject} metadata
+ */
+
+/**
+ * Parse metadata from ebook file
+ *
+ * @param {import('../../models/Book').EBookFileObject} ebookFile
+ * @returns {Promise<EBookFileScanData>}
+ */
+async function parse(ebookFile) {
+  if (!ebookFile) return null
+
+  if (ebookFile.ebookFormat === 'epub') {
+    return parseEpubMetadata.parse(ebookFile.metadata.path)
+  }
+  return null
+}
+module.exports.parse = parse
+
+/**
+ * Extract cover from ebook file
+ *
+ * @param {EBookFileScanData} ebookFileScanData
+ * @param {string} outputCoverPath
+ * @returns {Promise<boolean>}
+ */
+async function extractCoverImage(ebookFileScanData, outputCoverPath) {
+  if (!ebookFileScanData?.ebookCoverPath) return false
+
+  if (ebookFileScanData.ebookFormat === 'epub') {
+    return parseEpubMetadata.extractCoverImage(ebookFileScanData.path, ebookFileScanData.ebookCoverPath, outputCoverPath)
+  }
+  return false
+}
+module.exports.extractCoverImage = extractCoverImage
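The module is a thin format dispatcher, with epub the only format wired up so far. The intended scan-then-extract flow looks roughly like this (the ebookFile object and output path are illustrative):

const parseEbookMetadata = require('./server/utils/parsers/parseEbookMetadata')
const scanData = await parseEbookMetadata.parse(ebookFile) // null for non-epub formats
if (scanData?.ebookCoverPath) {
  await parseEbookMetadata.extractCoverImage(scanData, '/metadata/items/li_abc123/cover.jpg')
}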
server/utils/parsers/parseEpubMetadata.js (new file, 109 lines)
@@ -0,0 +1,109 @@
+const Path = require('path')
+const Logger = require('../../Logger')
+const StreamZip = require('../../libs/nodeStreamZip')
+const parseOpfMetadata = require('./parseOpfMetadata')
+const { xmlToJSON } = require('../index')
+
+/**
+ * Extract file from epub and return string content
+ *
+ * @param {string} epubPath
+ * @param {string} filepath
+ * @returns {Promise<string>}
+ */
+async function extractFileFromEpub(epubPath, filepath) {
+  const zip = new StreamZip.async({ file: epubPath })
+  const data = await zip.entryData(filepath).catch((error) => {
+    Logger.error(`[parseEpubMetadata] Failed to extract ${filepath} from epub at "${epubPath}"`, error)
+  })
+  const filedata = data?.toString('utf8')
+  await zip.close()
+  return filedata
+}
+
+/**
+ * Extract an XML file from epub and return JSON
+ *
+ * @param {string} epubPath
+ * @param {string} xmlFilepath
+ * @returns {Promise<Object>}
+ */
+async function extractXmlToJson(epubPath, xmlFilepath) {
+  const filedata = await extractFileFromEpub(epubPath, xmlFilepath)
+  if (!filedata) return null
+  return xmlToJSON(filedata)
+}
+
+/**
+ * Extract cover image from epub return true if success
+ *
+ * @param {string} epubPath
+ * @param {string} epubImageFilepath
+ * @param {string} outputCoverPath
+ * @returns {Promise<boolean>}
+ */
+async function extractCoverImage(epubPath, epubImageFilepath, outputCoverPath) {
+  const zip = new StreamZip.async({ file: epubPath })
+
+  const success = await zip.extract(epubImageFilepath, outputCoverPath).then(() => true).catch((error) => {
+    Logger.error(`[parseEpubMetadata] Failed to extract image ${epubImageFilepath} from epub at "${epubPath}"`, error)
+    return false
+  })
+
+  await zip.close()
+
+  return success
+}
+module.exports.extractCoverImage = extractCoverImage
+
+/**
+ * Parse metadata from epub
+ *
+ * @param {string} epubPath
+ * @returns {Promise<import('./parseEbookMetadata').EBookFileScanData>}
+ */
+async function parse(epubPath) {
+  Logger.debug(`Parsing metadata from epub at "${epubPath}"`)
+  // Entrypoint of the epub that contains the filepath to the package document (opf file)
+  const containerJson = await extractXmlToJson(epubPath, 'META-INF/container.xml')
+
+  // Get package document opf filepath from container.xml
+  const packageDocPath = containerJson.container?.rootfiles?.[0]?.rootfile?.[0]?.$?.['full-path']
+  if (!packageDocPath) {
+    Logger.error(`Failed to get package doc path in Container.xml`, JSON.stringify(containerJson, null, 2))
+    return null
+  }
+
+  // Extract package document to JSON
+  const packageJson = await extractXmlToJson(epubPath, packageDocPath)
+  if (!packageJson) {
+    return null
+  }
+
+  // Parse metadata from package document opf file
+  const opfMetadata = parseOpfMetadata.parseOpfMetadataJson(packageJson)
+  if (!opfMetadata) {
+    Logger.error(`Unable to parse metadata in package doc with json`, JSON.stringify(packageJson, null, 2))
+    return null
+  }
+
+  const payload = {
+    path: epubPath,
+    ebookFormat: 'epub',
+    metadata: opfMetadata
+  }
+
+  // Attempt to find filepath to cover image
+  const manifestFirstImage = packageJson.package?.manifest?.[0]?.item?.find(item => item.$?.['media-type']?.startsWith('image/'))
+  let coverImagePath = manifestFirstImage?.$?.href
+  if (coverImagePath) {
+    const packageDirname = Path.dirname(packageDocPath)
+    payload.ebookCoverPath = Path.posix.join(packageDirname, coverImagePath)
+  } else {
+    Logger.warn(`Cover image not found in manifest for epub at "${epubPath}"`)
+  }
+
+  return payload
+}
+module.exports.parse = parse
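parse() follows the standard EPUB OCF layout: META-INF/container.xml names the OPF package document, the parsed package JSON is handed to parseOpfMetadataJson, and the first image in the manifest is treated as the cover. With a typical epub the lookups resolve roughly like this (paths illustrative):

// containerJson.container.rootfiles[0].rootfile[0].$['full-path'] -> 'OEBPS/content.opf'
// a manifest item with media-type 'image/jpeg' and href 'cover.jpg'
// -> payload.ebookCoverPath = 'OEBPS/cover.jpg' (href joined against the package dirname)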
server/utils/parsers/parseNfoMetadata.js (new file, 100 lines)
@@ -0,0 +1,100 @@
+function parseNfoMetadata(nfoText) {
+  if (!nfoText) return null
+  const lines = nfoText.split(/\r?\n/)
+  const metadata = {}
+  let insideBookDescription = false
+  lines.forEach(line => {
+    if (line.search(/^\s*book description\s*$/i) !== -1) {
+      insideBookDescription = true
+      return
+    }
+    if (insideBookDescription) {
+      if (line.search(/^\s*=+\s*$/i) !== -1) return
+      metadata.description = metadata.description || ''
+      metadata.description += line + '\n'
+      return
+    }
+    const match = line.match(/^(.*?):(.*)$/)
+    if (match) {
+      const key = match[1].toLowerCase().trim()
+      const value = match[2].trim()
+      if (!value) return
+      switch (key) {
+        case 'title':
+          {
+            const titleMatch = value.match(/^(.*?):(.*)$/)
+            if (titleMatch) {
+              metadata.title = titleMatch[1].trim()
+              metadata.subtitle = titleMatch[2].trim()
+            } else {
+              metadata.title = value
+            }
+          }
+          break
+        case 'author':
+          metadata.authors = value.split(/\s*,\s*/).filter(v => v)
+          break
+        case 'narrator':
+        case 'read by':
+          metadata.narrators = value.split(/\s*,\s*/).filter(v => v)
+          break
+        case 'series name':
+          metadata.series = value
+          break
+        case 'genre':
+          metadata.genres = value.split(/\s*,\s*/).filter(v => v)
+          break
+        case 'tags':
+          metadata.tags = value.split(/\s*,\s*/).filter(v => v)
+          break
+        case 'copyright':
+        case 'audible.com release':
+        case 'audiobook copyright':
+        case 'book copyright':
+        case 'recording copyright':
+        case 'release date':
+        case 'date':
+          {
+            const year = extractYear(value)
+            if (year) {
+              metadata.publishedYear = year
+            }
+          }
+          break
+        case 'position in series':
+          metadata.sequence = value
+          break
+        case 'unabridged':
+          metadata.abridged = value.toLowerCase() === 'yes' ? false : true
+          break
+        case 'abridged':
+          metadata.abridged = value.toLowerCase() === 'no' ? false : true
+          break
+        case 'publisher':
+          metadata.publisher = value
+          break
+        case 'asin':
+          metadata.asin = value
+          break
+        case 'isbn':
+        case 'isbn-10':
+        case 'isbn-13':
+          metadata.isbn = value
+          break
+      }
+    }
+  })
+
+  // Trim leading/trailing whitespace for description
+  if (metadata.description) {
+    metadata.description = metadata.description.trim()
+  }
+
+  return metadata
+}
+module.exports = { parseNfoMetadata }
+
+function extractYear(str) {
+  const match = str.match(/\d{4}/g)
+  return match ? match[match.length - 1] : null
+}
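The parser is line-oriented: 'Key: Value' pairs map onto known fields, and every line under a 'Book Description' heading is accumulated verbatim. A sketch of input and output (values illustrative):

const { parseNfoMetadata } = require('./server/utils/parsers/parseNfoMetadata')
const nfo = 'Title: Project Hail Mary\nAuthor: Andy Weir\nRead By: Ray Porter\nUnabridged: Yes'
parseNfoMetadata(nfo)
// -> { title: 'Project Hail Mary', authors: ['Andy Weir'], narrators: ['Ray Porter'], abridged: false }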
@@ -100,13 +100,28 @@ function fetchLanguage(metadata) {
 }

 function fetchSeries(metadataMeta) {
-  if (!metadataMeta) return null
-  return fetchTagString(metadataMeta, "calibre:series")
-}
+  if (!metadataMeta) return []
+  const result = []
+  for (let i = 0; i < metadataMeta.length; i++) {
+    if (metadataMeta[i].$?.name === 'calibre:series' && metadataMeta[i].$.content?.trim()) {
+      const name = metadataMeta[i].$.content.trim()
+      let sequence = null
+      if (metadataMeta[i + 1]?.$?.name === 'calibre:series_index' && metadataMeta[i + 1].$?.content?.trim()) {
+        sequence = metadataMeta[i + 1].$.content.trim()
+      }
+      result.push({ name, sequence })
+    }
+  }

-function fetchVolumeNumber(metadataMeta) {
-  if (!metadataMeta) return null
-  return fetchTagString(metadataMeta, "calibre:series_index")
+  // If one series was found with no series_index then check if any series_index meta can be found
+  // this is to support when calibre:series_index is not directly underneath calibre:series
+  if (result.length === 1 && !result[0].sequence) {
+    const seriesIndexMeta = metadataMeta.find(m => m.$?.name === 'calibre:series_index' && m.$.content?.trim())
+    if (seriesIndexMeta) {
+      result[0].sequence = seriesIndexMeta.$.content.trim()
+    }
+  }
+  return result
 }

 function fetchNarrators(creators, metadata) {
@@ -130,11 +145,7 @@ function stripPrefix(str) {
   return str.split(':').pop()
 }

-module.exports.parseOpfMetadataXML = async (xml) => {
-  const json = await xmlToJSON(xml)
-
-  if (!json) return null
-
+module.exports.parseOpfMetadataJson = (json) => {
   // Handle <package ...> or with prefix <ns0:package ...>
   const packageKey = Object.keys(json).find(key => stripPrefix(key) === 'package')
   if (!packageKey) return null
@@ -161,7 +172,7 @@ module.exports.parseOpfMetadataXML = async (xml) => {
   const creators = parseCreators(metadata)
   const authors = (fetchCreators(creators, 'aut') || []).map(au => au?.trim()).filter(au => au)
   const narrators = (fetchNarrators(creators, metadata) || []).map(nrt => nrt?.trim()).filter(nrt => nrt)
-  const data = {
+  return {
     title: fetchTitle(metadata),
     subtitle: fetchSubtitle(metadata),
     authors,
@@ -173,9 +184,13 @@ module.exports.parseOpfMetadataXML = async (xml) => {
     description: fetchDescription(metadata),
     genres: fetchGenres(metadata),
     language: fetchLanguage(metadata),
-    series: fetchSeries(metadata.meta),
-    sequence: fetchVolumeNumber(metadata.meta),
+    series: fetchSeries(metadataMeta),
     tags: fetchTags(metadata)
   }
-  return data
+}
+
+module.exports.parseOpfMetadataXML = async (xml) => {
+  const json = await xmlToJSON(xml)
+  if (!json) return null
+  return this.parseOpfMetadataJson(json)
+}
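fetchSeries now returns an array of { name, sequence } pairs instead of a single string, pairing each calibre:series meta tag with the calibre:series_index that follows it (plus the single-series fallback described in the comments). A sketch of the shape it handles, as meta tags look after xmlToJSON:

const metadataMeta = [
  { $: { name: 'calibre:series', content: 'The Expanse' } },
  { $: { name: 'calibre:series_index', content: '3' } }
]
fetchSeries(metadataMeta) // -> [{ name: 'The Expanse', sequence: '3' }]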
@@ -1,13 +1,6 @@
+const xml2js = require('xml2js')
 const Logger = require('../../Logger')

-// given a list of audio files, extract all of the Overdrive Media Markers metaTags, and return an array of them as XML
-function extractOverdriveMediaMarkers(includedAudioFiles) {
-  Logger.debug('[parseOverdriveMediaMarkers] Extracting overdrive media markers')
-  var markers = includedAudioFiles.map((af) => af.metaTags.tagOverdriveMediaMarker).filter(af => af) || []
-
-  return markers
-}
-
 // given the array of Overdrive Media Markers from generateOverdriveMediaMarkers()
 // parse and clean them in to something a bit more usable
 function cleanOverdriveMediaMarkers(overdriveMediaMarkers) {
@@ -29,12 +22,11 @@ function cleanOverdriveMediaMarkers(overdriveMediaMarkers) {
   ]
   */

-  var parseString = require('xml2js').parseString // function to convert xml to JSON
-  var parsedOverdriveMediaMarkers = []
-
+  const parsedOverdriveMediaMarkers = []
   overdriveMediaMarkers.forEach((item, index) => {
-    var parsed_result = null
-    parseString(item, function (err, result) {
+    let parsed_result = null
+    // convert xml to JSON
+    xml2js.parseString(item, function (err, result) {
      /*
        result.Markers.Marker is the result of parsing the XML for the MediaMarker tags for the MP3 file (Part##.mp3)
        it is shaped like this, and needs further cleaning below:
@@ -54,7 +46,7 @@ function cleanOverdriveMediaMarkers(overdriveMediaMarkers) {
      */

      // The values for Name and Time in results.Markers.Marker are returned as Arrays from parseString and should be strings
-      if (result && result.Markers && result.Markers.Marker) {
+      if (result?.Markers?.Marker) {
        parsed_result = objectValuesArrayToString(result.Markers.Marker)
      }
    })
@@ -138,22 +130,13 @@ function generateParsedChapters(includedAudioFiles, cleanedOverdriveMediaMarkers
   return parsedChapters
 }

-module.exports.overdriveMediaMarkersExist = (includedAudioFiles) => {
-  return extractOverdriveMediaMarkers(includedAudioFiles).length > 1
-}
-
 module.exports.parseOverdriveMediaMarkersAsChapters = (includedAudioFiles) => {
   Logger.info('[parseOverdriveMediaMarkers] Parsing of Overdrive Media Markers started')

-  var overdriveMediaMarkers = extractOverdriveMediaMarkers(includedAudioFiles)
+  const overdriveMediaMarkers = includedAudioFiles.map((af) => af.metaTags.tagOverdriveMediaMarker).filter(af => af) || []
   if (!overdriveMediaMarkers.length) return null

   var cleanedOverdriveMediaMarkers = cleanOverdriveMediaMarkers(overdriveMediaMarkers)
+  // TODO: generateParsedChapters requires overdrive media markers and included audio files length to be the same
+  // so if not equal then we must exit
+  if (cleanedOverdriveMediaMarkers.length !== includedAudioFiles.length) return null

-  var parsedChapters = generateParsedChapters(includedAudioFiles, cleanedOverdriveMediaMarkers)
-
-  return parsedChapters
+  return generateParsedChapters(includedAudioFiles, cleanedOverdriveMediaMarkers)
 }
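Each tagOverdriveMediaMarker meta tag holds an XML blob that xml2js turns into the result.Markers.Marker array the comments above describe. For orientation, the raw tag content looks roughly like this (chapter names and times illustrative):

// <Markers>
//   <Marker><Name>Chapter 1</Name><Time>0:00.000</Time></Marker>
//   <Marker><Name>Chapter 2</Name><Time>15:08.000</Time></Marker>
// </Markers>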
@@ -1,10 +1,11 @@
-const Logger = require('../Logger')
 const axios = require('axios')
+const ssrfFilter = require('ssrf-req-filter')
+const Logger = require('../Logger')
 const { xmlToJSON, levenshteinDistance } = require('./index')
 const htmlSanitizer = require('../utils/htmlSanitizer')

 function extractFirstArrayItem(json, key) {
-  if (!json[key] || !json[key].length) return null
+  if (!json[key]?.length) return null
   return json[key][0]
 }
@@ -66,7 +67,7 @@ function extractPodcastMetadata(channel) {
   arrayFields.forEach((key) => {
     const cleanKey = key.split(':').pop()
     let value = extractFirstArrayItem(channel, key)
-    if (value && value['_']) value = value['_']
+    if (value?.['_']) value = value['_']
     metadata[cleanKey] = value
   })
   return metadata
@@ -110,17 +111,30 @@ function extractEpisodeData(item) {
     const pubDate = extractFirstArrayItem(item, 'pubDate')
     if (typeof pubDate === 'string') {
       episode.pubDate = pubDate
-    } else if (pubDate && typeof pubDate._ === 'string') {
+    } else if (typeof pubDate?._ === 'string') {
       episode.pubDate = pubDate._
     } else {
       Logger.error(`[podcastUtils] Invalid pubDate ${item['pubDate']} for ${episode.enclosure.url}`)
     }
   }

+  if (item['guid']) {
+    const guidItem = extractFirstArrayItem(item, 'guid')
+    if (typeof guidItem === 'string') {
+      episode.guid = guidItem
+    } else if (typeof guidItem?._ === 'string') {
+      episode.guid = guidItem._
+    } else {
+      Logger.error(`[podcastUtils] Invalid guid ${item['guid']} for ${episode.enclosure.url}`)
+    }
+  }
+
   const arrayFields = ['title', 'itunes:episodeType', 'itunes:season', 'itunes:episode', 'itunes:author', 'itunes:duration', 'itunes:explicit', 'itunes:subtitle']
   arrayFields.forEach((key) => {
     const cleanKey = key.split(':').pop()
-    episode[cleanKey] = extractFirstArrayItem(item, key)
+    let value = extractFirstArrayItem(item, key)
+    if (value?.['_']) value = value['_']
+    episode[cleanKey] = value
   })
   return episode
 }
@@ -142,6 +156,7 @@ function cleanEpisodeData(data) {
     explicit: data.explicit || '',
     publishedAt,
     enclosure: data.enclosure,
+    guid: data.guid || null,
     chaptersUrl: data.chaptersUrl || null,
     chaptersType: data.chaptersType || null
   }
@@ -159,16 +174,16 @@ function extractPodcastEpisodes(items) {
 }

 function cleanPodcastJson(rssJson, excludeEpisodeMetadata) {
-  if (!rssJson.channel || !rssJson.channel.length) {
+  if (!rssJson.channel?.length) {
     Logger.error(`[podcastUtil] Invalid podcast no channel object`)
     return null
   }
-  var channel = rssJson.channel[0]
-  if (!channel.item || !channel.item.length) {
+  const channel = rssJson.channel[0]
+  if (!channel.item?.length) {
     Logger.error(`[podcastUtil] Invalid podcast no episodes`)
     return null
   }
-  var podcast = {
+  const podcast = {
     metadata: extractPodcastMetadata(channel)
   }
   if (!excludeEpisodeMetadata) {
@@ -181,8 +196,8 @@ function cleanPodcastJson(rssJson, excludeEpisodeMetadata) {

 module.exports.parsePodcastRssFeedXml = async (xml, excludeEpisodeMetadata = false, includeRaw = false) => {
   if (!xml) return null
-  var json = await xmlToJSON(xml)
-  if (!json || !json.rss) {
+  const json = await xmlToJSON(xml)
+  if (!json?.rss) {
     Logger.error('[podcastUtils] Invalid XML or RSS feed')
     return null
   }
@@ -202,9 +217,26 @@ module.exports.parsePodcastRssFeedXml = async (xml, excludeEpisodeMetadata = false, includeRaw = false) => {
   }
 }

+/**
+ * Get podcast RSS feed as JSON
+ * Uses SSRF filter to prevent internal URLs
+ *
+ * @param {string} feedUrl
+ * @param {boolean} [excludeEpisodeMetadata=false]
+ * @returns {Promise}
+ */
 module.exports.getPodcastFeed = (feedUrl, excludeEpisodeMetadata = false) => {
   Logger.debug(`[podcastUtils] getPodcastFeed for "${feedUrl}"`)
-  return axios.get(feedUrl, { timeout: 12000, responseType: 'arraybuffer' }).then(async (data) => {
-
+  return axios({
+    url: feedUrl,
+    method: 'GET',
+    timeout: 12000,
+    responseType: 'arraybuffer',
+    headers: { Accept: 'application/rss+xml, application/xhtml+xml, application/xml, */*;q=0.8' },
+    httpAgent: ssrfFilter(feedUrl),
+    httpsAgent: ssrfFilter(feedUrl)
+  }).then(async (data) => {
+
     // Adding support for iso-8859-1 encoded RSS feeds.
     // See: https://github.com/advplyr/audiobookshelf/issues/1489
@@ -215,14 +247,14 @@ module.exports.getPodcastFeed = (feedUrl, excludeEpisodeMetadata = false) => {
       data.data = data.data.toString()
     }

-    if (!data || !data.data) {
+    if (!data?.data) {
       Logger.error(`[podcastUtils] getPodcastFeed: Invalid podcast feed request response (${feedUrl})`)
-      return false
+      return null
     }
     Logger.debug(`[podcastUtils] getPodcastFeed for "${feedUrl}" success - parsing xml`)
-    var payload = await this.parsePodcastRssFeedXml(data.data, excludeEpisodeMetadata)
+    const payload = await this.parsePodcastRssFeedXml(data.data, excludeEpisodeMetadata)
     if (!payload) {
-      return false
+      return null
     }

     // RSS feed may be a private RSS feed
@@ -231,7 +263,7 @@ module.exports.getPodcastFeed = (feedUrl, excludeEpisodeMetadata = false) => {
     return payload.podcast
   }).catch((error) => {
     Logger.error('[podcastUtils] getPodcastFeed Error', error)
-    return false
+    return null
   })
 }
@@ -246,7 +278,7 @@ module.exports.findMatchingEpisodes = async (feedUrl, searchTitle) => {

 module.exports.findMatchingEpisodesInFeed = (feed, searchTitle) => {
   searchTitle = searchTitle.toLowerCase().trim()
-  if (!feed || !feed.episodes) {
+  if (!feed?.episodes) {
     return null
   }
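getPodcastFeed now sends the request through ssrf-req-filter agents and resolves to null (never false) on any failure. The agents refuse connections that resolve to private or internal address space, closing a server-side request forgery hole where a user-supplied feed URL could probe the host's network:

// e.g. a malicious feed URL pointing at cloud instance metadata:
// await getPodcastFeed('http://169.254.169.254/latest/meta-data')
// -> the ssrfFilter agent rejects the connection, the catch handler logs it, null is returned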
server/utils/queries/adminStats.js (new file, 162 lines)
@@ -0,0 +1,162 @@
+const Sequelize = require('sequelize')
+const Database = require('../../Database')
+const PlaybackSession = require('../../models/PlaybackSession')
+const fsExtra = require('../../libs/fsExtra')
+
+module.exports = {
+  /**
+   *
+   * @param {number} year YYYY
+   * @returns {Promise<PlaybackSession[]>}
+   */
+  async getListeningSessionsForYear(year) {
+    const sessions = await Database.playbackSessionModel.findAll({
+      where: {
+        createdAt: {
+          [Sequelize.Op.gte]: `${year}-01-01`,
+          [Sequelize.Op.lt]: `${year + 1}-01-01`
+        }
+      }
+    })
+    return sessions
+  },
+
+  /**
+   *
+   * @param {number} year YYYY
+   * @returns {Promise<number>}
+   */
+  async getNumAuthorsAddedForYear(year) {
+    const count = await Database.authorModel.count({
+      where: {
+        createdAt: {
+          [Sequelize.Op.gte]: `${year}-01-01`,
+          [Sequelize.Op.lt]: `${year + 1}-01-01`
+        }
+      }
+    })
+    return count
+  },
+
+  /**
+   *
+   * @param {number} year YYYY
+   * @returns {Promise<import('../../models/Book')[]>}
+   */
+  async getBooksAddedForYear(year) {
+    const books = await Database.bookModel.findAll({
+      attributes: ['id', 'title', 'coverPath', 'duration', 'createdAt'],
+      where: {
+        createdAt: {
+          [Sequelize.Op.gte]: `${year}-01-01`,
+          [Sequelize.Op.lt]: `${year + 1}-01-01`
+        }
+      },
+      include: {
+        model: Database.libraryItemModel,
+        attributes: ['id', 'mediaId', 'mediaType', 'size'],
+        required: true
+      },
+      order: Database.sequelize.random()
+    })
+    return books
+  },
+
+  /**
+   *
+   * @param {number} year YYYY
+   */
+  async getStatsForYear(year) {
+    const booksAdded = await this.getBooksAddedForYear(year)
+
+    let totalBooksAddedSize = 0
+    let totalBooksAddedDuration = 0
+    const booksWithCovers = []
+
+    for (const book of booksAdded) {
+      // Grab first 25 that have a cover
+      if (book.coverPath && !booksWithCovers.includes(book.libraryItem.id) && booksWithCovers.length < 25 && await fsExtra.pathExists(book.coverPath)) {
+        booksWithCovers.push(book.libraryItem.id)
+      }
+      if (book.duration && !isNaN(book.duration)) {
+        totalBooksAddedDuration += book.duration
+      }
+      if (book.libraryItem.size && !isNaN(book.libraryItem.size)) {
+        totalBooksAddedSize += book.libraryItem.size
+      }
+    }
+
+    const numAuthorsAdded = await this.getNumAuthorsAddedForYear(year)
+
+    let authorListeningMap = {}
+    let narratorListeningMap = {}
+    let genreListeningMap = {}
+
+    const listeningSessions = await this.getListeningSessionsForYear(year)
+    let totalListeningTime = 0
+    for (const ls of listeningSessions) {
+      totalListeningTime += (ls.timeListening || 0)
+
+      const authors = ls.mediaMetadata.authors || []
+      authors.forEach((au) => {
+        if (!authorListeningMap[au.name]) authorListeningMap[au.name] = 0
+        authorListeningMap[au.name] += (ls.timeListening || 0)
+      })
+
+      const narrators = ls.mediaMetadata.narrators || []
+      narrators.forEach((narrator) => {
+        if (!narratorListeningMap[narrator]) narratorListeningMap[narrator] = 0
+        narratorListeningMap[narrator] += (ls.timeListening || 0)
+      })
+
+      // Filter out bad genres like "audiobook" and "audio book"
+      const genres = (ls.mediaMetadata.genres || []).filter(g => !g.toLowerCase().includes('audiobook') && !g.toLowerCase().includes('audio book'))
+      genres.forEach((genre) => {
+        if (!genreListeningMap[genre]) genreListeningMap[genre] = 0
+        genreListeningMap[genre] += (ls.timeListening || 0)
+      })
+    }
+
+    let topAuthors = null
+    topAuthors = Object.keys(authorListeningMap).map(authorName => ({
+      name: authorName,
+      time: Math.round(authorListeningMap[authorName])
+    })).sort((a, b) => b.time - a.time).slice(0, 3)
+
+    let topNarrators = null
+    topNarrators = Object.keys(narratorListeningMap).map(narratorName => ({
+      name: narratorName,
+      time: Math.round(narratorListeningMap[narratorName])
+    })).sort((a, b) => b.time - a.time).slice(0, 3)
+
+    let topGenres = null
+    topGenres = Object.keys(genreListeningMap).map(genre => ({
+      genre,
+      time: Math.round(genreListeningMap[genre])
+    })).sort((a, b) => b.time - a.time).slice(0, 3)
+
+    // Stats for total books, size and duration for everything added this year or earlier
+    const [totalStatResultsRow] = await Database.sequelize.query(`SELECT SUM(li.size) AS totalSize, SUM(b.duration) AS totalDuration, COUNT(*) AS totalItems FROM libraryItems li, books b WHERE b.id = li.mediaId AND li.mediaType = 'book' AND li.createdAt < ":nextYear-01-01";`, {
+      replacements: {
+        nextYear: year + 1
+      }
+    })
+    const totalStatResults = totalStatResultsRow[0]
+
+    return {
+      numListeningSessions: listeningSessions.length,
+      numBooksAdded: booksAdded.length,
+      numAuthorsAdded,
+      totalBooksAddedSize,
+      totalBooksAddedDuration: Math.round(totalBooksAddedDuration),
+      booksAddedWithCovers: booksWithCovers,
+      totalBooksSize: totalStatResults?.totalSize || 0,
+      totalBooksDuration: totalStatResults?.totalDuration || 0,
+      totalListeningTime,
+      numBooks: totalStatResults?.totalItems || 0,
+      topAuthors,
+      topNarrators,
+      topGenres
+    }
+  }
+}
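getStatsForYear pulls the year-in-review numbers from three sources: the year's playback sessions (listening-time maps per author, narrator, and genre), the year's added books, and one raw SQL rollup over everything added up to that year. Consuming it looks roughly like this (field values illustrative):

const adminStats = require('./server/utils/queries/adminStats')
const stats = await adminStats.getStatsForYear(2023)
// stats.topAuthors -> up to 3 entries like { name: 'Some Author', time: 123456 }, sorted by listening time
// also: numBooksAdded, totalListeningTime, booksAddedWithCovers (up to 25 library item ids), ...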
@@ -308,6 +308,8 @@ module.exports = {
   async getNewestAuthors(library, user, limit) {
     if (library.mediaType !== 'book') return { authors: [], count: 0 }

+    const { bookWhere, replacements } = libraryItemsBookFilters.getUserPermissionBookWhereQuery(user)
+
     const { rows: authors, count } = await Database.authorModel.findAndCountAll({
       where: {
         libraryId: library.id,
@@ -315,9 +317,15 @@ module.exports = {
           [Sequelize.Op.gte]: new Date(new Date() - (60 * 24 * 60 * 60 * 1000)) // 60 days ago
         }
       },
+      replacements,
       include: {
-        model: Database.bookAuthorModel,
-        required: true // Must belong to a book
+        model: Database.bookModel,
+        attributes: ['id', 'tags', 'explicit'],
+        where: bookWhere,
+        required: true, // Must belong to a book
+        through: {
+          attributes: []
+        }
       },
       limit,
       distinct: true,
@@ -328,7 +336,7 @@ module.exports = {
     return {
       authors: authors.map((au) => {
-        const numBooks = au.bookAuthors?.length || 0
+        const numBooks = au.books.length || 0
         return au.getOldAuthor().toJSONExpanded(numBooks)
       }),
       count
|
|||
const Database = require('../../Database')
|
||||
const Logger = require('../../Logger')
|
||||
const authorFilters = require('./authorFilters')
|
||||
const { asciiOnlyToLowerCase } = require('../index')
|
||||
|
||||
module.exports = {
|
||||
/**
|
||||
|
|
@ -1013,7 +1014,8 @@ module.exports = {
|
|||
let matchText = null
|
||||
let matchKey = null
|
||||
for (const key of ['title', 'subtitle', 'asin', 'isbn']) {
|
||||
if (book[key]?.toLowerCase().includes(query)) {
|
||||
const valueToLower = asciiOnlyToLowerCase(book[key])
|
||||
if (valueToLower.includes(query)) {
|
||||
matchText = book[key]
|
||||
matchKey = key
|
||||
break
|
||||
|
|
|
|||
|
|
@@ -2,6 +2,7 @@
 const Sequelize = require('sequelize')
 const Database = require('../../Database')
 const Logger = require('../../Logger')
+const { asciiOnlyToLowerCase } = require('../index')

 module.exports = {
   /**
@@ -364,7 +365,8 @@ module.exports = {
     let matchText = null
     let matchKey = null
     for (const key of ['title', 'author', 'itunesId', 'itunesArtistId']) {
-      if (podcast[key]?.toLowerCase().includes(query)) {
+      const valueToLower = asciiOnlyToLowerCase(podcast[key])
+      if (valueToLower.includes(query)) {
         matchText = podcast[key]
         matchKey = key
         break
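Both search paths now fold case with asciiOnlyToLowerCase so the JavaScript-side match agrees with SQLite, whose LOWER() only folds ASCII. A sketch of the mismatch this avoids:

'ÈTRE'.toLowerCase()          // 'être' (JavaScript folds unicode)
asciiOnlyToLowerCase('ÈTRE')  // 'Ètre' (matches what SQLite LOWER('ÈTRE') returns)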
Some files were not shown because too many files have changed in this diff.