Mirror of https://github.com/advplyr/audiobookshelf.git (synced 2026-01-08 03:59:37 +00:00)
Add: Chromecast support in experimental #367, Change: Audio player model for direct play
parent 9f133ba98c, commit 89f498f31a
26 changed files with 1113 additions and 672 deletions
client/players/AudioTrack.js (new file, 19 lines)
@@ -0,0 +1,19 @@

export default class AudioTrack {
  constructor(track) {
    this.index = track.index || 0
    this.startOffset = track.startOffset || 0 // Total time of all previous tracks
    this.duration = track.duration || 0
    this.title = track.filename || ''
    this.contentUrl = track.contentUrl || null
    this.mimeType = track.mimeType
  }

  get fullContentUrl() {
    if (!this.contentUrl || this.contentUrl.startsWith('http')) return this.contentUrl

    if (process.env.NODE_ENV === 'development') {
      return `${process.env.serverUrl}${this.contentUrl}`
    }
    return `${window.location.origin}/${this.contentUrl}`
  }
}
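A minimal usage sketch (not part of the commit) of how a direct-play AudioTrack might be constructed; the field names follow the constructor above, while the concrete values and URL shape are illustrative assumptions.

// Illustrative only: the values and URL shape are assumptions, not from the diff.
import AudioTrack from './AudioTrack'

const track = new AudioTrack({
  index: 1,
  startOffset: 0,            // total time of all previous tracks
  duration: 1830.5,          // seconds
  filename: 'Chapter 01.mp3',
  contentUrl: '/lib/library-id/folder-id/Chapter 01.mp3?token=USER_TOKEN',
  mimeType: 'audio/mpeg'
})

// In a production build fullContentUrl resolves against window.location.origin;
// in development it uses process.env.serverUrl.
console.log(track.fullContentUrl)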
client/players/CastPlayer.js (new file, 140 lines)
@@ -0,0 +1,140 @@

import { buildCastLoadRequest, castLoadMedia } from "./castUtils"
import EventEmitter from 'events'

export default class CastPlayer extends EventEmitter {
  constructor(ctx) {
    super()

    this.ctx = ctx
    this.player = null
    this.playerController = null

    this.audiobook = null
    this.audioTracks = []
    this.currentTrackIndex = 0
    this.hlsStreamId = null
    this.currentTime = 0
    this.playWhenReady = false
    this.defaultPlaybackRate = 1

    this.coverUrl = ''
    this.castPlayerState = 'IDLE'

    // Supported audio codecs for chromecast
    this.supportedAudioCodecs = ['opus', 'mp3', 'aac', 'flac', 'webma', 'wav']

    this.initialize()
  }

  get currentTrack() {
    return this.audioTracks[this.currentTrackIndex] || {}
  }

  initialize() {
    this.player = this.ctx.$root.castPlayer
    this.playerController = this.ctx.$root.castPlayerController
    this.playerController.addEventListener(
      cast.framework.RemotePlayerEventType.MEDIA_INFO_CHANGED, this.evtMediaInfoChanged.bind(this))
  }

  evtMediaInfoChanged() {
    // Use the current session to get an up to date media status.
    let session = cast.framework.CastContext.getInstance().getCurrentSession()
    if (!session) {
      return
    }
    let media = session.getMediaSession()
    if (!media) {
      return
    }

    // var currentItemId = media.currentItemId
    var currentItemId = media.media.itemId
    if (currentItemId && this.currentTrackIndex !== currentItemId - 1) {
      this.currentTrackIndex = currentItemId - 1
    }

    if (media.playerState !== this.castPlayerState) {
      this.emit('stateChange', media.playerState)
      this.castPlayerState = media.playerState
    }
  }

  destroy() {
    if (this.playerController) {
      this.playerController.stop()
    }
  }

  async set(audiobook, tracks, hlsStreamId, startTime, playWhenReady = false) {
    this.audiobook = audiobook
    this.audioTracks = tracks
    this.hlsStreamId = hlsStreamId
    this.playWhenReady = playWhenReady

    this.currentTime = startTime

    var coverImg = this.ctx.$store.getters['audiobooks/getBookCoverSrc'](audiobook)
    if (process.env.NODE_ENV === 'development') {
      this.coverUrl = coverImg
    } else {
      this.coverUrl = `${window.location.origin}/${coverImg}`
    }

    var request = buildCastLoadRequest(this.audiobook, this.coverUrl, this.audioTracks, this.currentTime, playWhenReady, this.defaultPlaybackRate)

    var castSession = cast.framework.CastContext.getInstance().getCurrentSession()
    await castLoadMedia(castSession, request)
  }

  resetStream(startTime) {
    // Cast only direct play for now
  }

  playPause() {
    if (this.playerController) this.playerController.playOrPause()
  }

  play() {
    if (this.playerController) this.playerController.playOrPause()
  }

  pause() {
    if (this.playerController) this.playerController.playOrPause()
  }

  getCurrentTime() {
    var currentTrackOffset = this.currentTrack.startOffset || 0
    return this.player ? currentTrackOffset + this.player.currentTime : 0
  }

  getDuration() {
    if (!this.audioTracks.length) return 0
    var lastTrack = this.audioTracks[this.audioTracks.length - 1]
    return lastTrack.startOffset + lastTrack.duration
  }

  setPlaybackRate(playbackRate) {
    this.defaultPlaybackRate = playbackRate
  }

  async seek(time, playWhenReady) {
    if (!this.player) return
    if (time < this.currentTrack.startOffset || time > this.currentTrack.startOffset + this.currentTrack.duration) {
      // Change Track
      var request = buildCastLoadRequest(this.audiobook, this.coverUrl, this.audioTracks, time, playWhenReady, this.defaultPlaybackRate)
      var castSession = cast.framework.CastContext.getInstance().getCurrentSession()
      await castLoadMedia(castSession, request)
    } else {
      var offsetTime = time - (this.currentTrack.startOffset || 0)
      this.player.currentTime = Math.max(0, offsetTime)
      this.playerController.seek()
    }
  }

  setVolume(volume) {
    if (!this.player) return
    this.player.volumeLevel = volume
    this.playerController.setVolumeLevel()
  }
}
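CastPlayer.initialize() reads this.ctx.$root.castPlayer and castPlayerController, so it assumes the root component has already created the Cast Web Sender objects. A rough sketch of that setup (assumed, not shown in this diff) using the standard Cast framework API:

// Assumed root-component setup; not part of this commit.
// Requires the Cast sender SDK (cast_sender.js) to be loaded on the page first.
const castContext = cast.framework.CastContext.getInstance()
castContext.setOptions({
  receiverApplicationId: chrome.cast.media.DEFAULT_MEDIA_RECEIVER_APP_ID,
  autoJoinPolicy: chrome.cast.AutoJoinPolicy.ORIGIN_SCOPED
})

// RemotePlayer/RemotePlayerController are what CastPlayer expects on $root
const castPlayer = new cast.framework.RemotePlayer()
const castPlayerController = new cast.framework.RemotePlayerController(castPlayer)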
client/players/LocalPlayer.js (new file, 238 lines)
@@ -0,0 +1,238 @@

import Hls from 'hls.js'
import EventEmitter from 'events'

export default class LocalPlayer extends EventEmitter {
  constructor(ctx) {
    super()

    this.ctx = ctx
    this.player = null

    this.audiobook = null
    this.audioTracks = []
    this.currentTrackIndex = 0
    this.hlsStreamId = null
    this.hlsInstance = null
    this.usingNativeplayer = false
    this.currentTime = 0
    this.playWhenReady = false
    this.defaultPlaybackRate = 1

    this.initialize()
  }

  get currentTrack() {
    return this.audioTracks[this.currentTrackIndex] || {}
  }

  initialize() {
    if (document.getElementById('audio-player')) {
      document.getElementById('audio-player').remove()
    }
    var audioEl = document.createElement('audio')
    audioEl.id = 'audio-player'
    audioEl.style.display = 'none'
    document.body.appendChild(audioEl)
    this.player = audioEl

    this.player.addEventListener('play', this.evtPlay.bind(this))
    this.player.addEventListener('pause', this.evtPause.bind(this))
    this.player.addEventListener('progress', this.evtProgress.bind(this))
    this.player.addEventListener('error', this.evtError.bind(this))
    this.player.addEventListener('loadedmetadata', this.evtLoadedMetadata.bind(this))
    this.player.addEventListener('timeupdate', this.evtTimeupdate.bind(this))
  }

  evtPlay() {
    this.emit('stateChange', 'PLAYING')
  }
  evtPause() {
    this.emit('stateChange', 'PAUSED')
  }
  evtProgress() {
    var lastBufferTime = this.getLastBufferedTime()
    this.emit('buffertimeUpdate', lastBufferTime)
  }
  evtError(error) {
    console.error('Player error', error)
  }
  evtLoadedMetadata(data) {
    console.log('Audio Loaded Metadata', data)
    this.emit('stateChange', 'LOADED')
    if (this.playWhenReady) {
      this.playWhenReady = false
      this.play()
    }
  }
  evtTimeupdate() {
    if (this.player.paused) {
      this.emit('timeupdate', this.getCurrentTime())
    }
  }

  destroy() {
    if (this.hlsStreamId) {
      // Close HLS Stream
      console.log('Closing HLS Streams', this.hlsStreamId)
      this.ctx.$axios.$post(`/api/streams/${this.hlsStreamId}/close`).catch((error) => {
        console.error('Failed to request close hls stream', this.hlsStreamId, error)
      })
    }
    this.destroyHlsInstance()
    if (this.player) {
      this.player.remove()
    }
  }

  set(audiobook, tracks, hlsStreamId, startTime, playWhenReady = false) {
    this.audiobook = audiobook
    this.audioTracks = tracks
    this.hlsStreamId = hlsStreamId
    this.playWhenReady = playWhenReady
    if (this.hlsInstance) {
      this.destroyHlsInstance()
    }

    this.currentTime = startTime

    // iOS does not support Media Elements but allows for HLS in the native audio player
    if (!Hls.isSupported()) {
      console.warn('HLS is not supported - fallback to using audio element')
      this.usingNativeplayer = true
      this.player.src = this.currentTrack.fullContentUrl
      this.player.currentTime = this.currentTime
      return
    }

    var hlsOptions = {
      startPosition: this.currentTime || -1
      // No longer needed because token is put in a query string
      // xhrSetup: (xhr) => {
      //   xhr.setRequestHeader('Authorization', `Bearer ${this.token}`)
      // }
    }
    this.hlsInstance = new Hls(hlsOptions)

    this.hlsInstance.attachMedia(this.player)
    this.hlsInstance.on(Hls.Events.MEDIA_ATTACHED, () => {
      this.hlsInstance.loadSource(this.currentTrack.fullContentUrl)

      this.hlsInstance.on(Hls.Events.MANIFEST_PARSED, () => {
        console.log('[HLS] Manifest Parsed')
      })

      this.hlsInstance.on(Hls.Events.ERROR, (e, data) => {
        console.error('[HLS] Error', data.type, data.details, data)
        if (data.details === Hls.ErrorDetails.BUFFER_STALLED_ERROR) {
          console.error('[HLS] BUFFER STALLED ERROR')
        }
      })
      this.hlsInstance.on(Hls.Events.DESTROYING, () => {
        console.log('[HLS] Destroying HLS Instance')
      })
    })
  }

  destroyHlsInstance() {
    if (!this.hlsInstance) return
    if (this.hlsInstance.destroy) {
      var temp = this.hlsInstance
      temp.destroy()
    }
    this.hlsInstance = null
  }

  async resetStream(startTime) {
    this.destroyHlsInstance()
    await new Promise((resolve) => setTimeout(resolve, 1000))
    this.set(this.audiobook, this.audioTracks, this.hlsStreamId, startTime, true)
  }

  playPause() {
    if (!this.player) return
    if (this.player.paused) this.play()
    else this.pause()
  }

  play() {
    if (this.player) this.player.play()
  }

  pause() {
    if (this.player) this.player.pause()
  }

  getCurrentTime() {
    var currentTrackOffset = this.currentTrack.startOffset || 0
    return this.player ? currentTrackOffset + this.player.currentTime : 0
  }

  getDuration() {
    if (!this.audioTracks.length) return 0
    var lastTrack = this.audioTracks[this.audioTracks.length - 1]
    return lastTrack.startOffset + lastTrack.duration
  }

  setPlaybackRate(playbackRate) {
    if (!this.player) return
    this.defaultPlaybackRate = playbackRate
    this.player.playbackRate = playbackRate
  }

  seek(time) {
    if (!this.player) return
    var offsetTime = time - (this.currentTrack.startOffset || 0)
    this.player.currentTime = Math.max(0, offsetTime)
  }

  setVolume(volume) {
    if (!this.player) return
    this.player.volume = volume
  }

  // Utils
  isValidDuration(duration) {
    if (duration && !isNaN(duration) && duration !== Number.POSITIVE_INFINITY && duration !== Number.NEGATIVE_INFINITY) {
      return true
    }
    return false
  }

  getBufferedRanges() {
    if (!this.player) return []
    const ranges = []
    const seekable = this.player.buffered || []

    let offset = 0

    for (let i = 0, length = seekable.length; i < length; i++) {
      let start = seekable.start(i)
      let end = seekable.end(i)
      if (!this.isValidDuration(start)) {
        start = 0
      }
      if (!this.isValidDuration(end)) {
        end = 0
        continue
      }

      ranges.push({
        start: start + offset,
        end: end + offset
      })
    }
    return ranges
  }

  getLastBufferedTime() {
    var bufferedRanges = this.getBufferedRanges()
    if (!bufferedRanges.length) return 0

    var buff = bufferedRanges.find((buff) => buff.start < this.player.currentTime && buff.end > this.player.currentTime)
    if (buff) return buff.end

    var last = bufferedRanges[bufferedRanges.length - 1]
    return last.end
  }
}
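A hedged sketch of wiring a LocalPlayer to an HLS track, matching the events it emits above. Assumptions: a Nuxt-style ctx with $axios and $store, an already-fetched audiobook object, and an illustrative playlist URL.

// Illustrative wiring only; ctx, audiobook, and the stream URL are assumptions.
import LocalPlayer from './LocalPlayer'
import AudioTrack from './AudioTrack'

const player = new LocalPlayer(ctx)
player.on('stateChange', (state) => console.log('player state', state))
player.on('timeupdate', (time) => console.log('current time', time))
player.on('buffertimeUpdate', (buffered) => console.log('buffered up to', buffered))

const hlsTrack = new AudioTrack({
  duration: audiobook.duration,
  contentUrl: '/hls/stream-id/output.m3u8?token=USER_TOKEN', // assumed playlist path
  mimeType: 'application/vnd.apple.mpegurl'
})
player.set(audiobook, [hlsTrack], 'stream-id', 0, true) // start at 0s, play when ready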
client/players/PlayerHandler.js (new file, 306 lines)
@@ -0,0 +1,306 @@

import LocalPlayer from './LocalPlayer'
import CastPlayer from './CastPlayer'
import AudioTrack from './AudioTrack'

export default class PlayerHandler {
  constructor(ctx) {
    this.ctx = ctx
    this.audiobook = null
    this.playWhenReady = false
    this.player = null
    this.playerState = 'IDLE'
    this.currentStreamId = null
    this.startTime = 0

    this.lastSyncTime = 0
    this.lastSyncedAt = 0
    this.listeningTimeSinceSync = 0

    this.playInterval = null
  }

  get isCasting() {
    return this.ctx.$store.state.globals.isCasting
  }
  get isPlayingCastedAudiobook() {
    return this.audiobook && (this.player instanceof CastPlayer)
  }
  get isPlayingLocalAudiobook() {
    return this.audiobook && (this.player instanceof LocalPlayer)
  }
  get userToken() {
    return this.ctx.$store.getters['user/getToken']
  }
  get playerPlaying() {
    return this.playerState === 'PLAYING'
  }

  load(audiobook, playWhenReady, startTime = 0) {
    if (!this.player) this.switchPlayer()

    console.log('Load audiobook', audiobook)
    this.audiobook = audiobook
    this.startTime = startTime
    this.playWhenReady = playWhenReady
    this.prepare()
  }

  switchPlayer() {
    if (this.isCasting && !(this.player instanceof CastPlayer)) {
      console.log('[PlayerHandler] Switching to cast player')

      this.stopPlayInterval()
      this.playerStateChange('LOADING')

      this.startTime = this.player ? this.player.getCurrentTime() : this.startTime
      if (this.player) {
        this.player.destroy()
      }
      this.player = new CastPlayer(this.ctx)
      this.setPlayerListeners()

      if (this.audiobook) {
        // Audiobook was already loaded - prepare for cast
        this.playWhenReady = false
        this.prepare()
      }
    } else if (!this.isCasting && !(this.player instanceof LocalPlayer)) {
      console.log('[PlayerHandler] Switching to local player')

      this.stopPlayInterval()
      this.playerStateChange('LOADING')

      if (this.player) {
        this.player.destroy()
      }
      this.player = new LocalPlayer(this.ctx)
      this.setPlayerListeners()

      if (this.audiobook) {
        // Audiobook was already loaded - prepare for local play
        this.playWhenReady = false
        this.prepare()
      }
    }
  }

  setPlayerListeners() {
    this.player.on('stateChange', this.playerStateChange.bind(this))
    this.player.on('timeupdate', this.playerTimeupdate.bind(this))
    this.player.on('buffertimeUpdate', this.playerBufferTimeUpdate.bind(this))
  }

  playerStateChange(state) {
    console.log('[PlayerHandler] Player state change', state)
    this.playerState = state
    if (this.playerState === 'PLAYING') {
      this.startPlayInterval()
    } else {
      this.stopPlayInterval()
    }
    if (this.playerState === 'LOADED' || this.playerState === 'PLAYING') {
      this.ctx.setDuration(this.player.getDuration())
    }
    if (this.playerState !== 'LOADING') {
      this.ctx.setCurrentTime(this.player.getCurrentTime())
    }

    this.ctx.isPlaying = this.playerState === 'PLAYING'
    this.ctx.playerLoading = this.playerState === 'LOADING'
  }

  playerTimeupdate(time) {
    this.ctx.setCurrentTime(time)
  }

  playerBufferTimeUpdate(buffertime) {
    this.ctx.setBufferTime(buffertime)
  }

  async prepare() {
    var useHls = !this.isCasting
    if (useHls) {
      var stream = await this.ctx.$axios.$get(`/api/books/${this.audiobook.id}/stream`).catch((error) => {
        console.error('Failed to start stream', error)
      })
      if (stream) {
        console.log(`[PlayerHandler] prepare hls stream`, stream)
        this.setHlsStream(stream)
      }
    } else {
      // Setup tracks
      var runningTotal = 0
      var audioTracks = (this.audiobook.tracks || []).map((track) => {
        var audioTrack = new AudioTrack(track)
        audioTrack.startOffset = runningTotal
        audioTrack.contentUrl = `/lib/${this.audiobook.libraryId}/${this.audiobook.folderId}/${track.path}?token=${this.userToken}`
        audioTrack.mimeType = (track.codec === 'm4b' || track.codec === 'm4a') ? 'audio/mp4' : `audio/${track.codec}`

        runningTotal += audioTrack.duration
        return audioTrack
      })
      this.setDirectPlay(audioTracks)
    }
  }

  closePlayer() {
    console.log('[PlayerHandler] Close Player')
    if (this.player) {
      this.player.destroy()
    }
    this.player = null
    this.playerState = 'IDLE'
    this.audiobook = null
    this.currentStreamId = null
    this.startTime = 0
    this.stopPlayInterval()
  }

  prepareStream(stream) {
    if (!this.player) this.switchPlayer()
    this.audiobook = stream.audiobook
    this.setHlsStream({
      streamId: stream.id,
      streamUrl: stream.clientPlaylistUri,
      startTime: stream.clientCurrentTime
    })
  }

  setHlsStream(stream) {
    this.currentStreamId = stream.streamId
    var audioTrack = new AudioTrack({
      duration: this.audiobook.duration,
      contentUrl: stream.streamUrl + '?token=' + this.userToken,
      mimeType: 'application/vnd.apple.mpegurl'
    })
    this.startTime = stream.startTime
    this.ctx.playerLoading = true
    this.player.set(this.audiobook, [audioTrack], this.currentStreamId, stream.startTime, this.playWhenReady)
  }

  setDirectPlay(audioTracks) {
    this.currentStreamId = null
    this.ctx.playerLoading = true
    this.player.set(this.audiobook, audioTracks, null, this.startTime, this.playWhenReady)
  }

  resetStream(startTime, streamId) {
    if (this.currentStreamId === streamId) {
      this.player.resetStream(startTime)
    } else {
      console.warn('resetStream mismatch streamId', this.currentStreamId, streamId)
    }
  }

  startPlayInterval() {
    clearInterval(this.playInterval)
    var lastTick = Date.now()
    this.playInterval = setInterval(() => {
      // Update UI
      if (!this.player) return
      var currentTime = this.player.getCurrentTime()
      this.ctx.setCurrentTime(currentTime)

      var exactTimeElapsed = ((Date.now() - lastTick) / 1000)
      lastTick = Date.now()
      this.listeningTimeSinceSync += exactTimeElapsed
      if (this.listeningTimeSinceSync >= 5) {
        this.sendProgressSync(currentTime)
        this.listeningTimeSinceSync = 0
      }
    }, 1000)
  }

  sendProgressSync(currentTime) {
    var diffSinceLastSync = Math.abs(this.lastSyncTime - currentTime)
    if (diffSinceLastSync < 1) return

    this.lastSyncTime = currentTime
    if (this.currentStreamId) { // Updating stream progress (HLS stream)
      var listeningTimeToAdd = Math.max(0, Math.floor(this.listeningTimeSinceSync))
      var syncData = {
        timeListened: listeningTimeToAdd,
        currentTime,
        streamId: this.currentStreamId,
        audiobookId: this.audiobook.id
      }
      this.ctx.$axios.$post('/api/syncStream', syncData, { timeout: 1000 }).catch((error) => {
        console.error('Failed to update stream progress', error)
      })
    } else {
      // Direct play via chromecast does not yet have a backend stream session model,
      // so the audiobook progress is updated this way (instead of through the stream)
      var duration = this.getDuration()
      var syncData = {
        totalDuration: duration,
        currentTime,
        progress: duration > 0 ? currentTime / duration : 0,
        isRead: false,
        audiobookId: this.audiobook.id,
        lastUpdate: Date.now()
      }
      this.ctx.$axios.$post('/api/syncLocal', syncData, { timeout: 1000 }).catch((error) => {
        console.error('Failed to update local progress', error)
      })
    }
  }

  stopPlayInterval() {
    clearInterval(this.playInterval)
    this.playInterval = null
  }

  playPause() {
    if (this.player) this.player.playPause()
  }

  play() {
    if (!this.player) return
    this.player.play()
  }

  pause() {
    if (this.player) this.player.pause()
  }

  getCurrentTime() {
    return this.player ? this.player.getCurrentTime() : 0
  }

  getDuration() {
    return this.player ? this.player.getDuration() : 0
  }

  jumpBackward() {
    if (!this.player) return
    var currentTime = this.getCurrentTime()
    this.seek(Math.max(0, currentTime - 10))
  }

  jumpForward() {
    if (!this.player) return
    var currentTime = this.getCurrentTime()
    this.seek(Math.min(currentTime + 10, this.getDuration()))
  }

  setVolume(volume) {
    if (!this.player) return
    this.player.setVolume(volume)
  }

  setPlaybackRate(playbackRate) {
    if (!this.player) return
    this.player.setPlaybackRate(playbackRate)
  }

  seek(time) {
    if (!this.player) return
    this.player.seek(time, this.playerPlaying)
    this.ctx.setCurrentTime(time)

    // Update progress if paused
    if (!this.playerPlaying) {
      this.sendProgressSync(time)
    }
  }
}
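A minimal sketch of how a player component might drive PlayerHandler. The component context is inferred from the calls above (setDuration, setCurrentTime, setBufferTime, isPlaying, playerLoading, $axios, $store) and is an assumption, not something shown in this diff.

// Assumed usage from a Vue component; not part of this commit.
import PlayerHandler from './PlayerHandler'

export default {
  data() {
    return { isPlaying: false, playerLoading: false, currentTime: 0, totalDuration: 0, bufferedTime: 0 }
  },
  mounted() {
    this.playerHandler = new PlayerHandler(this)
    // this.audiobook is assumed to be fetched elsewhere
    this.playerHandler.load(this.audiobook, /* playWhenReady */ true, /* startTime */ 0)
  },
  methods: {
    setDuration(duration) { this.totalDuration = duration },
    setCurrentTime(time) { this.currentTime = time },
    setBufferTime(buffered) { this.bufferedTime = buffered },
    togglePlay() { this.playerHandler.playPause() }
  }
}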
client/players/castUtils.js (new file, 74 lines)
@@ -0,0 +1,74 @@

function getMediaInfoFromTrack(audiobook, castImage, track) {
  // https://developers.google.com/cast/docs/reference/web_sender/chrome.cast.media.AudiobookChapterMediaMetadata
  var metadata = new chrome.cast.media.AudiobookChapterMediaMetadata()
  metadata.bookTitle = audiobook.book.title
  metadata.chapterNumber = track.index
  metadata.chapterTitle = track.title
  metadata.images = [castImage]
  metadata.title = track.title
  metadata.subtitle = audiobook.book.title

  var trackurl = track.fullContentUrl
  var mimeType = track.mimeType
  var mediainfo = new chrome.cast.media.MediaInfo(trackurl, mimeType)
  mediainfo.metadata = metadata
  mediainfo.itemId = track.index
  mediainfo.duration = track.duration
  return mediainfo
}

function buildCastMediaInfo(audiobook, coverUrl, tracks) {
  const castImage = new chrome.cast.Image(coverUrl)
  return tracks.map(t => getMediaInfoFromTrack(audiobook, castImage, t))
}

function buildCastQueueRequest(audiobook, coverUrl, tracks, startTime) {
  var mediaInfoItems = buildCastMediaInfo(audiobook, coverUrl, tracks)

  var containerMetadata = new chrome.cast.media.AudiobookContainerMetadata()
  containerMetadata.authors = [audiobook.book.authorFL]
  containerMetadata.narrators = [audiobook.book.narratorFL]
  containerMetadata.publisher = audiobook.book.publisher || undefined

  var mediaQueueItems = mediaInfoItems.map((mi) => {
    var queueItem = new chrome.cast.media.QueueItem(mi)
    return queueItem
  })

  // Find track to start playback and calculate track start offset
  var track = tracks.find(at => at.startOffset <= startTime && at.startOffset + at.duration > startTime)
  var trackStartIndex = track ? track.index - 1 : 0
  var trackStartTime = Math.floor(track ? startTime - track.startOffset : 0)

  var queueData = new chrome.cast.media.QueueData(audiobook.id, audiobook.book.title, '', false, mediaQueueItems, trackStartIndex, trackStartTime)
  queueData.containerMetadata = containerMetadata
  queueData.queueType = chrome.cast.media.QueueType.AUDIOBOOK
  return queueData
}

function castLoadMedia(castSession, request) {
  return new Promise((resolve) => {
    castSession.loadMedia(request)
      .then(() => resolve(true), (reason) => {
        console.error('Load media failed', reason)
        resolve(false)
      })
  })
}

function buildCastLoadRequest(audiobook, coverUrl, tracks, startTime, autoplay, playbackRate) {
  var request = new chrome.cast.media.LoadRequest()

  request.queueData = buildCastQueueRequest(audiobook, coverUrl, tracks, startTime)
  request.currentTime = request.queueData.startTime

  request.autoplay = autoplay
  request.playbackRate = playbackRate
  return request
}

export {
  buildCastLoadRequest,
  castLoadMedia
}
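How the two exported helpers fit together, as a sketch under the assumptions that a Cast session is already connected and that tracks are AudioTrack instances with index, startOffset, duration, fullContentUrl, and mimeType populated (audiobook, coverUrl, tracks, and startTime here are illustrative inputs):

// Illustrative only; the input variables are assumptions, not from the diff.
import { buildCastLoadRequest, castLoadMedia } from './castUtils'

const request = buildCastLoadRequest(
  audiobook,   // needs audiobook.id and audiobook.book metadata
  coverUrl,    // absolute cover image URL
  tracks,      // AudioTrack[] with 1-based index and startOffset set
  startTime,   // absolute position in seconds across all tracks
  true,        // autoplay
  1            // playback rate
)

const session = cast.framework.CastContext.getInstance().getCurrentSession()
castLoadMedia(session, request).then((success) => {
  console.log('Cast load', success ? 'succeeded' : 'failed') // resolves false on failure
})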