Fix transcoded track and add info if there are no track sources

wvffle 2022-10-28 20:04:02 +00:00 committed by Georg Krause
parent ab8699783d
commit 34d4f3b25b
No known key found for this signature in database
GPG key ID: 2970D504B2183D22
3 changed files with 54 additions and 9 deletions

View file

@@ -18,6 +18,7 @@ export interface Sound {
readonly isErrored: Ref<boolean>
readonly isLoaded: Ref<boolean>
readonly currentTime: number
readonly playable: boolean
readonly duration: number
readonly buffered: number
looping: boolean
@@ -54,9 +55,18 @@ export class HTMLSound implements Sound {
onSoundEnd: EventHookOn<HTMLSound>
constructor (sources: SoundSource[]) {
this.onSoundLoop = this.#soundLoopEventHook.on
this.onSoundEnd = this.#soundEndEventHook.on
// TODO: Quality picker
const source = sources[0]?.url
if (!source) {
this.isLoaded.value = true
return
}
this.#audio.crossOrigin = 'anonymous'
this.#audio.src = sources[0].url
this.#audio.src = source
this.#audio.preload = 'auto'
useEventListener(this.#audio, 'ended', () => this.#soundEndEventHook.trigger(this))
@@ -75,9 +85,6 @@ export class HTMLSound implements Sound {
this.isErrored.value = true
this.isLoaded.value = true
})
this.onSoundLoop = this.#soundLoopEventHook.on
this.onSoundEnd = this.#soundEndEventHook.on
}
preload () {
@@ -104,6 +111,10 @@ export class HTMLSound implements Sound {
this.#audio.currentTime += seconds
}
get playable () {
return this.#audio.src !== ''
}
get duration () {
const { duration } = this.#audio
return isNaN(duration) ? 0 : duration
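
For context, a minimal caller sketch of the new no-source handling (the playIfPossible helper, its import comment and the surrounding setup are assumptions for illustration; HTMLSound, SoundSource, playable and preload() come from the file above):

// Hypothetical caller: construct a sound and bail out when no upload URL was available.
// import { HTMLSound, type SoundSource } from '~/composables/audio/sound'  // assumed path
const playIfPossible = (sources: SoundSource[]): HTMLSound | undefined => {
  const sound = new HTMLSound(sources)
  if (!sound.playable) {
    // The constructor found no sources[0]?.url, flagged isLoaded and returned early,
    // so #audio.src is still '' and the getter reports the track as unplayable.
    return undefined
  }
  sound.preload()
  return sound
}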

View file

@@ -9,6 +9,7 @@ import { useRouter } from 'vue-router'
import { useStore } from '~/store'
import { usePlayer } from '~/composables/audio/player'
import { useTracks } from '~/composables/audio/tracks'
import { useQueue } from '~/composables/audio/queue'
import time from '~/utils/time'
@@ -42,6 +43,8 @@ const {
clear
} = useQueue()
const { currentSound } = useTracks()
const queueModal = ref()
const { activate, deactivate } = useFocusTrap(queueModal, { allowOutsideClick: true, preventScroll: true })
@@ -204,7 +207,7 @@ const hideArtist = () => {
The track cannot be loaded
</translate>
</h3>
<p v-if="hasNext && isPlaying && errored">
<p v-if="hasNext && isPlaying">
<translate translate-context="Sidebar/Player/Error message.Paragraph">
The next track will play automatically in a few seconds
</translate>
@@ -216,6 +219,22 @@ const hideArtist = () => {
</translate>
</p>
</div>
<div
v-else-if="currentSound && !currentSound.playable"
class="ui small warning message"
>
<h3 class="header">
<translate translate-context="Sidebar/Player/No sources">
The track has no available sources
</translate>
</h3>
<p v-if="hasNext && isPlaying">
<translate translate-context="Sidebar/Player/Error message.Paragraph">
The next track will play automatically in a few seconds
</translate>
<i class="loading spinner icon" />
</p>
</div>
<div class="additional-controls desktop-and-below">
<track-favorite-icon
v-if="$store.state.auth.authenticated"

View file

@@ -20,9 +20,6 @@ const soundCache = useLRUCache<number, Sound>({ max: 10 })
const getTrackSources = (track: QueueTrack): QueueTrackSource[] => {
const sources: QueueTrackSource[] = track.sources
// NOTE: Filter out repeating and unplayable media types
.filter(({ mimetype, bitrate }, index, array) => array.findIndex((upload) => upload.mimetype + upload.bitrate === mimetype + bitrate) === index)
.filter(({ mimetype }) => ALLOWED_PLAY_TYPES.includes(AUDIO_ELEMENT.canPlayType(`${mimetype}`)))
.map((source) => ({
...source,
url: store.getters['instance/absoluteUrl'](source.url) as string
@@ -40,6 +37,9 @@ const getTrackSources = (track: QueueTrack): QueueTrackSource[] => {
}
return sources
// NOTE: Filter out repeating and unplayable media types
.filter(({ mimetype, bitrate }, index, array) => array.findIndex((upload) => upload.mimetype + upload.bitrate === mimetype + bitrate) === index)
.filter(({ mimetype }) => ALLOWED_PLAY_TYPES.includes(AUDIO_ELEMENT.canPlayType(`${mimetype}`)))
}
// Use Tracks
@@ -59,12 +59,14 @@ export const useTracks = createGlobalState(() => {
const SoundImplementation = soundImplementation.value
const sound = new SoundImplementation(sources)
sound.onSoundEnd(() => {
console.log('TRACK ENDED, PLAYING NEXT')
// NOTE: We push it to the end of the job queue
setTimeout(() => playNext(), 0)
})
soundCache.set(track.id, sound)
soundPromises.delete(track.id)
return sound
@@ -87,12 +89,25 @@ const createTrack = async (index: number) => {
const createTrack = async (index: number) => {
stopPreloadTimeout()
const { queue, currentIndex } = useQueue()
const { queue, currentIndex, playNext, hasNext } = useQueue()
if (queue.value.length <= index || index === -1) return
console.log('LOADING TRACK', index)
const track = queue.value[index]
const sound = await createSound(track)
if (!sound.playable) {
setTimeout(() => {
if (hasNext.value && index !== queue.value.length - 1) {
return playNext(true)
}
const { isPlaying } = usePlayer()
isPlaying.value = false
}, 3000)
return
}
console.log('CONNECTING NODE')
sound.audioNode.disconnect()
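
As a side note on the source filtering that now runs after the transcoded-URL fallback in getTrackSources, here is a standalone sketch of the same dedupe-and-playability logic (the Source shape, the sample data and the ALLOWED_PLAY_TYPES value are assumptions for illustration):

// Keep one source per mimetype+bitrate pair, then drop types the browser cannot play.
type Source = { mimetype: string, bitrate: number, url: string }

const ALLOWED_PLAY_TYPES = ['probably', 'maybe']   // assumed to mirror the real constant
const AUDIO_ELEMENT = new Audio()

const filterSources = (sources: Source[]): Source[] => sources
  .filter(({ mimetype, bitrate }, index, array) =>
    array.findIndex((upload) => upload.mimetype + upload.bitrate === mimetype + bitrate) === index)
  .filter(({ mimetype }) => ALLOWED_PLAY_TYPES.includes(AUDIO_ELEMENT.canPlayType(mimetype)))

// Example: the duplicate ogg upload collapses into one entry and the unknown type is dropped.
console.log(filterSources([
  { mimetype: 'audio/ogg', bitrate: 128000, url: '/a.ogg' },
  { mimetype: 'audio/ogg', bitrate: 128000, url: '/b.ogg' },
  { mimetype: 'application/x-unknown', bitrate: 128000, url: '/c.bin' }
]))

Running the sample in a browser should leave a single audio/ogg entry, though the exact result depends on what canPlayType reports there.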