mirror of https://git.stupid.fish/teidesu/scripts.git (synced 2025-11-27 16:31:25 +11:00)

chore: update public repo

commit 171ba5de7a, parent 261c7eefa0
3 changed files with 520 additions and 3 deletions
scripts/media/tidal-dl.ts (new file, 375 lines)
@@ -0,0 +1,375 @@
import { randomUUID } from 'node:crypto'
import { mkdir, rm, writeFile } from 'node:fs/promises'
import { dirname, join } from 'node:path'
import { asyncPool, base64, todo, unknownToError, utf8 } from '@fuman/utils'
import Spinnies from 'spinnies'
import { z } from 'zod'
import { $, question } from 'zx'
import { ffetch as ffetchBase } from '../../utils/fetch.ts'
import { sanitizeFilename } from '../../utils/fs.ts'
import { pipeIntoProc, runMetaflac, writeIntoProc } from '../../utils/media-metadata.ts'
import { getEnv } from '../../utils/misc.ts'
import { concatMpdSegments, parseSimpleMpd } from '../../utils/mpd.ts'
import { createLibcurlFetch } from '../../utils/temkakit/libcurl.ts'

const oauthResponse = await ffetchBase('https://auth.tidal.com/v1/oauth2/token', {
    form: {
        client_id: '49YxDN9a2aFV6RTG',
        grant_type: 'refresh_token',
        scope: 'r_usr w_usr',
        refresh_token: getEnv('TIDAL_REFRESH_TOKEN'),
    },
}).parsedJson(z.object({
    access_token: z.string(),
    user: z.object({
        username: z.string(),
        countryCode: z.string(),
    }),
}))

console.log('Logged in as %s', oauthResponse.user.username)

const ffetch = ffetchBase.extend({
    headers: {
        'accept': '*/*',
        'Authorization': `Bearer ${oauthResponse.access_token}`,
        'accept-language': 'en-US,en;q=0.5',
        'accept-encoding': 'gzip, deflate, br',
        'referer': 'https://tidal.com/',
        'origin': 'https://tidal.com',
        'sec-fetch-dest': 'empty',
        'sec-fetch-mode': 'cors',
        'sec-fetch-site': 'same-origin',
    },
    // for some reason the request sometimes hangs indefinitely, so we need to timeout
    timeout: 5000,
    retry: {
        maxRetries: 10,
        // onError: (err, req) => {
        //     console.log('%s: error: %s', req.url, err)
        //     return true
        // },
    },
})

const PlaybackInfoResult = z.object({
    albumPeakAmplitude: z.number(),
    albumReplayGain: z.number(),
    assetPresentation: z.string(),
    audioMode: z.string(),
    audioQuality: z.enum(['HIGH', 'LOSSLESS', 'HI_RES_LOSSLESS']),
    bitDepth: z.number(),
    manifest: z.string(),
    manifestHash: z.string(),
    manifestMimeType: z.literal('application/dash+xml'),
    sampleRate: z.number(),
    streamingSessionId: z.string(),
    trackId: z.number(),
    trackPeakAmplitude: z.number(),
    trackReplayGain: z.number(),
})

const streamingSessionId = randomUUID()

const TidalTrack = z.object({
    id: z.number(),
    album: z.object({
        id: z.number(),
        cover: z.string(),
    }),
    artists: z.array(z.object({
        id: z.number(),
        name: z.string(),
    })),
    isrc: z.string().nullable(),
    trackNumber: z.number(),
    volumeNumber: z.number(),
    title: z.string(),
    copyright: z.string().nullable(),
    version: z.string().nullable(),
    bpm: z.number().nullable(),
})
type TidalTrack = z.infer<typeof TidalTrack>

function getTrackName(track: TidalTrack) {
    let name = track.title
    if (track.version) {
        name += ` ${track.version}`
    }
    return name
}

function getTrackArtistString(track: TidalTrack | TidalAlbum) {
    return track.artists.map(it => it.name).join(', ')
}

function getAlbumCoverUrl(uuid: string) {
    return `https://resources.tidal.com/images/${uuid.replace(/-/g, '/')}/1280x1280.jpg`
}
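
// Illustrative note (not part of the committed file): getAlbumCoverUrl maps the dashed
// cover UUID onto the slash-separated path used by Tidal's image CDN, e.g. a hypothetical
// id '0d4f224c-cb7f-4a4f-8ab1-d3390564fa4b' becomes
// 'https://resources.tidal.com/images/0d4f224c/cb7f/4a4f/8ab1/d3390564fa4b/1280x1280.jpg'.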

const TidalAlbum = z.object({
    id: z.number(),
    title: z.string(),
    cover: z.string(),
    releaseDate: z.string(),
    artists: z.array(z.object({
        id: z.number(),
        name: z.string(),
    })),
})
type TidalAlbum = z.infer<typeof TidalAlbum>

const COMMON_QUERY = {
    countryCode: oauthResponse.user.countryCode,
    locale: 'en_US',
    deviceType: 'BROWSER',
}

async function downloadTrack(options: {
    track: TidalTrack
    album: TidalAlbum
    albumCoverPath?: string
    destination: string
}) {
    const { track, album, albumCoverPath, destination } = options
    const [playbackRes, lyricsRes, creditsRes] = [
        await ffetch(`https://tidal.com/v1/tracks/${track.id}/playbackinfo`, {
            query: {
                audioquality: 'HI_RES_LOSSLESS',
                playbackmode: 'STREAM',
                assetpresentation: 'FULL',
            },
            headers: {
                'x-tidal-streamingsessionid': streamingSessionId,
                'x-tidal-token': '49YxDN9a2aFV6RTG',
            },
        }).parsedJson(PlaybackInfoResult),
        await ffetch(`https://tidal.com/v1/tracks/${track.id}/lyrics`, {
            query: {
                ...COMMON_QUERY,
            },
        }).parsedJson(z.object({
            lyrics: z.string(),
            // subtitles = timestamped lyrics
            subtitles: z.string().nullable(),
        })).catch(() => null),
        await ffetch(`https://tidal.com/v1/tracks/${track.id}/credits`, {
            query: {
                limit: 100,
                includeContributors: true,
                ...COMMON_QUERY,
            },
        }).parsedJson(z.array(z.object({
            type: z.string(),
            contributors: z.array(z.object({
                id: z.number(),
                name: z.string(),
            })),
        }))),
    ]

    const manifest = base64.decode(playbackRes.manifest)

    const ext = playbackRes.audioQuality === 'HIGH' ? 'm4a' : 'flac'
    const destFile = `${destination}.${ext}`

    await mkdir(dirname(destFile), { recursive: true })

    const lyricsLrc = lyricsRes ? lyricsRes.subtitles ?? lyricsRes.lyrics : undefined
    const keyedCredits = creditsRes
        ? Object.fromEntries(creditsRes.map(it => [it.type, it.contributors.map(it => it.name)]))
        : undefined

    const params: string[] = [
        '-y',
        '-i',
        'pipe:0',
        '-c',
        'copy',
        '-loglevel',
        'error',
        '-hide_banner',
        destFile,
    ]

    const proc = $`ffmpeg ${params}`
    await pipeIntoProc(proc, concatMpdSegments({
        mpd: parseSimpleMpd(utf8.decoder.decode(manifest)),
        fetch: async url => new Uint8Array(await ffetch(url).arrayBuffer()),
    }))
    await proc

    if (ext === 'flac') {
        await runMetaflac({
            path: destFile,
            tags: {
                TITLE: getTrackName(track),
                ALBUM: album.title,
                DATE: album.releaseDate,
                DISCNUMBER: track.volumeNumber,
                TRACKNUMBER: track.trackNumber,
                COMMENT: `ripped from tidal (id: ${track.id})`,
                ARTIST: track.artists.map(it => it.name),
                COPYRIGHT: track.copyright,
                LYRICS: lyricsLrc,
                REPLAYGAIN_ALBUM_GAIN: playbackRes.albumReplayGain,
                REPLAYGAIN_ALBUM_PEAK: playbackRes.albumPeakAmplitude,
                REPLAYGAIN_TRACK_GAIN: playbackRes.trackReplayGain,
                REPLAYGAIN_TRACK_PEAK: playbackRes.trackPeakAmplitude,
                PRODUCER: keyedCredits?.Producer,
                COMPOSER: keyedCredits?.Composer,
                LYRICIST: keyedCredits?.Lyricist,
                PERFORMER: keyedCredits?.['Vocal accompaniment']?.map(it => `${it} (Vocal)`),
                ISRC: track.isrc,
                BPM: track.bpm,
            },
            coverPath: albumCoverPath,
        })
    } else {
        console.log('warn: m4a tagging not yet implemented')
    }
}

async function fetchAlbumTracks(albumId: number) {
    let offset = 0
    const tracks: TidalTrack[] = []
    while (true) {
        const res = await ffetch(`https://tidal.com/v1/albums/${albumId}/items`, { query: {
            ...COMMON_QUERY,
            replace: true,
            offset,
            limit: 100,
        } }).parsedJson(z.object({
            items: z.array(z.object({
                item: TidalTrack,
                type: z.literal('track'),
            })),
            totalNumberOfItems: z.number(),
        }))

        for (const item of res.items) {
            tracks.push(item.item)
        }
        if (tracks.length >= res.totalNumberOfItems) break
        offset += 100
    }

    return tracks
}

async function downloadTrackList(opts: {
    tracks: TidalTrack[]
    albums: Map<number, TidalAlbum>
    albumCoverPaths: Map<number, string>
    destination: string
    includeTrackNumber?: boolean
    onDownloadStart?: (track: TidalTrack) => void
    onDownloadEnd?: (track: TidalTrack, error: Error | null) => void
}) {
    await mkdir(opts.destination, { recursive: true })

    const isMultiDisc = opts.tracks.some(it => it.volumeNumber !== 1)
    const firstTrackArtistString = getTrackArtistString(opts.tracks[0])
    const isDifferentArtists = opts.tracks.some(it => getTrackArtistString(it) !== firstTrackArtistString)

    await asyncPool(opts.tracks, async (track) => {
        let filename = ''
        if (opts.includeTrackNumber) {
            if (isMultiDisc) {
                filename = `${track.volumeNumber}-`
            }
            filename += `${track.trackNumber.toString().padStart(2, '0')}. `
        }
        if (isDifferentArtists) {
            filename += `${getTrackArtistString(track)} - `
        }
        filename += `${getTrackName(track)}`

        const filenamePath = join(opts.destination, sanitizeFilename(filename))

        try {
            opts.onDownloadStart?.(track)
            await downloadTrack({
                track,
                album: opts.albums.get(track.album.id)!,
                albumCoverPath: opts.albumCoverPaths.get(track.album.id)!,
                destination: filenamePath,
            })
            opts.onDownloadEnd?.(track, null)
        } catch (e) {
            opts.onDownloadEnd?.(track, unknownToError(e))
        }
    }, { limit: 8 })
}

const url = process.argv[2] ?? await question('url or search > ')

/* eslint-disable no-cond-assign */

let m
if ((m = url.match(/\/track\/(\d+)/))) {
    const track = await ffetch(`https://tidal.com/v1/tracks/${m[1]}`, { query: COMMON_QUERY })
        .parsedJson(TidalTrack)
    const [albumRes, albumCoverRes] = await Promise.all([
        ffetch(`https://tidal.com/v1/albums/${track.album.id}`, { query: COMMON_QUERY }).parsedJson(TidalAlbum),
        ffetch(getAlbumCoverUrl(track.album.cover)).arrayBuffer(),
    ])

    const tmpAlbumCoverPath = join(`assets/tidal-${track.album.cover}.jpg`)
    await writeFile(tmpAlbumCoverPath, new Uint8Array(albumCoverRes))

    await downloadTrack({
        track,
        album: albumRes,
        albumCoverPath: tmpAlbumCoverPath,
        destination: join('assets/tidal-dl', sanitizeFilename(`${getTrackArtistString(track)} - ${getTrackName(track)}`)),
    })

    await rm(tmpAlbumCoverPath)
} else if ((m = url.match(/\/album\/(\d+)/))) {
    const [albumRes, albumTracks] = await Promise.all([
        ffetch(`https://tidal.com/v1/albums/${m[1]}`, { query: COMMON_QUERY }).parsedJson(TidalAlbum),
        fetchAlbumTracks(Number(m[1])),
    ])

    console.log(`downloading album ${albumRes.title} with ${albumTracks.length} tracks`)

    const outDir = join('assets/tidal-dl', `${getTrackArtistString(albumRes)} - ${sanitizeFilename(albumRes.title)}`)
    await mkdir(outDir, { recursive: true })

    const albumCoverRes = await ffetch(getAlbumCoverUrl(albumRes.cover)).arrayBuffer()
    await writeFile(join(outDir, 'cover.jpg'), new Uint8Array(albumCoverRes))

    const spinnies = new Spinnies()
    spinnies.add('download', { text: 'downloading album...' })

    const errors = new Map<number, Error>()
    await downloadTrackList({
        tracks: albumTracks,
        albums: new Map([[albumRes.id, albumRes]]),
        albumCoverPaths: new Map([[albumRes.id, join(outDir, 'cover.jpg')]]),
        destination: outDir,
        includeTrackNumber: true,
        onDownloadStart(track) {
            spinnies.add(`${track.id}`, { text: getTrackName(track) })
        },
        onDownloadEnd(track, error) {
            spinnies.remove(`${track.id}`)
            if (error) {
                errors.set(track.id, error)
            }
        },
    })

    spinnies.succeed('download', { text: 'downloaded album' })

    if (errors.size) {
        console.error('errors:')
        for (const [id, error] of errors) {
            console.error(`  ${id}: ${error.message}`)
        }
    }
} else {
    todo('unsupported url')
}
utils/media-metadata.ts
@@ -43,13 +43,19 @@ export async function runMetaflac(options: {
        | 'TRACKNUMBER'
        | 'COMMENT'
        | 'PRODUCER'
        | 'LYRICIST'
        | 'PERFORMER'
        | 'COPYRIGHT'
        | 'ISRC'
        | 'LYRICS'
        | 'MAIN_ARTIST'
        | 'REPLAYGAIN_ALBUM_GAIN'
        | 'REPLAYGAIN_TRACK_GAIN'
        | 'REPLAYGAIN_ALBUM_PEAK'
        | 'REPLAYGAIN_TRACK_PEAK'
        | 'BPM',
        string | number | string[] | null
    >>

    coverPath?: string
}) {
    const params: string[] = [

@@ -100,3 +106,16 @@ export async function pipeIntoProc(proc: ProcessPromise, stream: ReadableStream)
        pipe.on('finish', resolve)
    })
}

export async function writeIntoProc(proc: ProcessPromise, data: Uint8Array) {
    return new Promise<void>((resolve, reject) => {
        proc.stdin.write(data, (err) => {
            if (err) {
                reject(err)
            } else {
                proc.stdin.end()
                resolve()
            }
        })
    })
}
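
For context, a minimal usage sketch of writeIntoProc (illustrative, not part of this commit; wc -c is an arbitrary stand-in for any program that reads its stdin):

import { $ } from 'zx'
import { writeIntoProc } from './media-metadata.ts'

// spawn a child process, feed it an in-memory buffer, then close its stdin
const proc = $`wc -c`
await writeIntoProc(proc, new TextEncoder().encode('hello world\n'))
const result = await proc
console.log('child consumed %s bytes', result.stdout.trim())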

utils/mpd.ts (new file, 123 lines)
@@ -0,0 +1,123 @@
import { assert, ConditionVariable } from '@fuman/utils'
import { load } from 'cheerio'
import { ffetch } from './fetch.ts'
import { writeWebStreamToFile } from './fs.ts'

interface SimpleMpd {
    codecs: string
    initUrl: string
    segmentUrls: string[]
}

export function parseSimpleMpd(xml: string): SimpleMpd {
    const $ = load(xml, { xml: true })

    const period = $('Period')
    assert(period.length === 1, 'expected exactly one period')

    const adaptations = period.find('AdaptationSet')
    assert(adaptations.length === 1, 'expected exactly one adaptation set')

    const representation = adaptations.find('Representation')
    assert(representation.length === 1, 'expected exactly one representation')

    const segmentTemplate = representation.find('SegmentTemplate')
    assert(segmentTemplate.length === 1, 'expected exactly one segment template')

    const initUrl = segmentTemplate.attr('initialization')
    const templateUrl = segmentTemplate.attr('media')
    const startNum = segmentTemplate.attr('startNumber')

    assert(initUrl !== undefined, 'expected initialization url')
    assert(templateUrl !== undefined, 'expected template url')
    assert(!templateUrl.match(/\$(RepresentationID|Bandwidth|Time)\$/), 'unsupported template url')
    assert(startNum !== undefined, 'expected start number')

    const timeline = segmentTemplate.find('SegmentTimeline')
    assert(timeline.length === 1, 'expected exactly one segment timeline')

    const segments = timeline.find('S')
    assert(segments.length > 0, 'expected at least one segment')

    const segmentUrls: string[] = []

    let segmentNum = Number(startNum)
    for (const segment of segments) {
        const duration = $(segment).attr('d')
        assert(duration !== undefined, 'expected duration')
        const r = $(segment).attr('r')
        const repeats = r ? Number.parseInt(r) + 1 : 1

        for (let i = 0; i < repeats; i++) {
            segmentUrls.push(templateUrl.replace('$Number$', String(segmentNum)))
            segmentNum++
        }
    }

    return {
        codecs: representation.attr('codecs')!,
        initUrl,
        segmentUrls,
    }
}
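
// Worked example (added for illustration, not part of the committed file): a manifest of
// the single-representation shape this parser expects might look like
//
//   <MPD><Period><AdaptationSet>
//     <Representation codecs="flac">
//       <SegmentTemplate initialization="init.mp4" media="seg-$Number$.mp4" startNumber="1">
//         <SegmentTimeline><S d="4000" r="1"/><S d="4000"/></SegmentTimeline>
//       </SegmentTemplate>
//     </Representation>
//   </AdaptationSet></Period></MPD>
//
// for which parseSimpleMpd returns
//   { codecs: 'flac', initUrl: 'init.mp4', segmentUrls: ['seg-1.mp4', 'seg-2.mp4', 'seg-3.mp4'] }
// (an <S> with r="1" expands to two segments, an <S> without r to one).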

export function concatMpdSegments(options: {
    mpd: SimpleMpd
    fetch: (url: string) => Promise<Uint8Array>
    poolSize?: number
}): ReadableStream {
    const { mpd, fetch, poolSize = 8 } = options

    let nextSegmentIdx = -1
    let nextWorkerSegmentIdx = -1
    const nextSegmentCv = new ConditionVariable()
    const buffer: Record<number, Uint8Array> = {}

    const downloadSegment = async (idx = nextWorkerSegmentIdx++) => {
        // console.log('downloading segment %s', idx)
        const url = idx === -1 ? mpd.initUrl : mpd.segmentUrls[idx]
        const chunk = await fetch(url)
        buffer[idx] = chunk

        if (idx === nextSegmentIdx) {
            nextSegmentCv.notify()
        }

        if (nextWorkerSegmentIdx < mpd.segmentUrls.length) {
            return downloadSegment()
        }
    }

    let error: unknown
    void Promise.all(Array.from({
        length: Math.min(poolSize, mpd.segmentUrls.length),
    }, downloadSegment))
        .catch((e) => {
            error = e
            nextSegmentCv.notify()
        })

    return new ReadableStream({
        async start(controller) {
            while (true) {
                await nextSegmentCv.wait()
                if (error) {
                    controller.error(error)
                    return
                }

                while (nextSegmentIdx in buffer) {
                    const buf = buffer[nextSegmentIdx]
                    delete buffer[nextSegmentIdx]
                    nextSegmentIdx++
                    controller.enqueue(buf)
                }

                if (nextSegmentIdx >= mpd.segmentUrls.length) {
                    controller.close()
                    return
                }
            }
        },
    })
}
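
A minimal consumption sketch for these two helpers (illustrative, not part of the commit; the manifest URL is hypothetical, and segment URLs are assumed to be absolute, which is also what the calling script in this commit assumes):

import { concatMpdSegments, parseSimpleMpd } from './mpd.ts'

// hypothetical manifest URL; any MPD matching the simple shape parsed above works
const manifestXml = await (await fetch('https://example.com/manifest.mpd')).text()
const mpd = parseSimpleMpd(manifestXml)

const stream = concatMpdSegments({
    mpd,
    // each segment is fetched as raw bytes; up to 8 requests run concurrently by default
    fetch: async url => new Uint8Array(await (await fetch(url)).arrayBuffer()),
})

// the stream yields the init segment followed by the media segments in order;
// Response() is a convenient standard way to drain it into a single buffer
const merged = new Uint8Array(await new Response(stream).arrayBuffer())
console.log('downloaded %d bytes', merged.length)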