diff --git a/scripts/media/soundcloud-dl.ts b/scripts/media/soundcloud-dl.ts index e931f92..fc77526 100644 --- a/scripts/media/soundcloud-dl.ts +++ b/scripts/media/soundcloud-dl.ts @@ -9,8 +9,9 @@ import { z } from 'zod' import { $, ProcessOutput, question } from 'zx' import { downloadFile, ffetch as ffetchBase } from '../../utils/fetch.ts' import { sanitizeFilename } from '../../utils/fs.ts' +import { generateOpusImageBlob, pipeIntoProc } from '../../utils/media-metadata.ts' import { chunks, getEnv } from '../../utils/misc.ts' -import { generateOpusImageBlob } from '../../utils/media-metadata.ts' +import { concatSegments, parseSimpleHls } from '../../utils/mp4-streaming.ts' const ffetchApi = ffetchBase.extend({ baseUrl: 'https://api-v2.soundcloud.com', @@ -29,6 +30,9 @@ const ffetchApi = ffetchBase.extend({ }) const ffetchHtml = ffetchBase.extend({ baseUrl: 'https://soundcloud.com', + retry: { + maxRetries: 3, + }, headers: { Cookie: `oauth_token=${getEnv('SOUNDCLOUD_TOKEN')}`, }, @@ -156,7 +160,7 @@ async function downloadTrack(track: ScTrack, opts: { transcoding = t } - const { url: hlsUrl } = await ffetchApi(transcoding.url, { + const { url: mediaUrl } = await ffetchApi(transcoding.url, { query: { track_authorization: track.track_authorization, }, @@ -174,14 +178,39 @@ async function downloadTrack(track: ScTrack, opts: { url: z.string(), })) - let ext = transcoding.format.mime_type.match(/^audio\/(\w+)(;|$)/)![1] - if (ext === 'mp4') ext = 'm4a' + const ext = { + aac_256k: 'm4a', + aac_160k: 'm4a', + aac_1_0: 'm4a', + aac_hq: 'm4a', + abr_hq: 'm4a', + abr_sq: 'm4a', + mp3_0_0: 'mp3', + opus_0_0: 'ogg', + }[transcoding.preset] + if (!ext) { + throw new Error(`Unsupported transcoding preset: ${transcoding.preset}`) + } const filename = `${opts.destination}.${ext}` + let stream: ReadableStream | null = null + let ffmpegInput: string + if (transcoding.format.protocol === 'hls') { + const segments = parseSimpleHls(await ffetchHtml(mediaUrl).text()) + stream = 
concatSegments({ + segments, + poolSize: 4, + fetch: async url => new Uint8Array(await ffetchHtml(url).arrayBuffer()), + }) + ffmpegInput = 'pipe:0' + } else { + ffmpegInput = mediaUrl + } + const params: string[] = [ '-y', '-i', - hlsUrl, + ffmpegInput, ] if (artworkBytes) { @@ -228,13 +257,17 @@ async function downloadTrack(track: ScTrack, opts: { '-metadata', `artist=${track.user.username}`, '-metadata', - `comment=${track.description ?? ''}`, + `comment=${`${track.description ?? ''}\n\nripped from soundcloud (id: ${track.id}, url: ${track.permalink_url})`}`.trimStart(), filename, ) while (true) { try { - await $`ffmpeg ${params}`.quiet(true) + const promise = $`ffmpeg ${params}`.quiet(true) + if (stream) { + await pipeIntoProc(promise, stream) + } + await promise break } catch (e) { if (!(e instanceof ProcessOutput)) { diff --git a/scripts/media/tidal-dl.ts b/scripts/media/tidal-dl.ts index cae999a..8f1d367 100644 --- a/scripts/media/tidal-dl.ts +++ b/scripts/media/tidal-dl.ts @@ -9,8 +9,7 @@ import { ffetch as ffetchBase } from '../../utils/fetch.ts' import { sanitizeFilename } from '../../utils/fs.ts' import { pipeIntoProc, runMetaflac, writeIntoProc } from '../../utils/media-metadata.ts' import { getEnv } from '../../utils/misc.ts' -import { concatMpdSegments, parseSimpleMpd } from '../../utils/mpd.ts' -import { createLibcurlFetch } from '../../utils/temkakit/libcurl.ts' +import { concatSegments, parseSimpleMpd } from '../../utils/mp4-streaming.ts' const oauthResponse = await ffetchBase('https://auth.tidal.com/v1/oauth2/token', { form: { @@ -192,8 +191,8 @@ async function downloadTrack(options: { ] const proc = $`ffmpeg ${params}` - await pipeIntoProc(proc, concatMpdSegments({ - mpd: parseSimpleMpd(utf8.decoder.decode(manifest)), + await pipeIntoProc(proc, concatSegments({ + segments: parseSimpleMpd(utf8.decoder.decode(manifest)).segments, fetch: async url => new Uint8Array(await ffetch(url).arrayBuffer()), })) await proc @@ -408,7 +407,7 @@ if ((m = 
url.match(/\/track\/(\d+)/))) { await rm(tmpAlbumCoverPath) } else if ((m = url.match(/\/album\/(\d+)/))) { - await downloadAlbum(m[1]) + await downloadAlbum(Number(m[1])) } else if ((m = url.match(/\/artist\/(\d+)/))) { const withAppearsOn = (await question('include appears on albums? (y/N) > ')).toLowerCase() === 'y' diff --git a/scripts/misc/ap-signed.ts b/scripts/misc/ap-signed.ts new file mode 100644 index 0000000..b2a8e52 --- /dev/null +++ b/scripts/misc/ap-signed.ts @@ -0,0 +1,99 @@ +import crypto from 'node:crypto' +import { question } from 'zx' +import { ffetch } from '../../utils/fetch.ts' +import { getEnv } from '../../utils/misc.ts' + +function bigintToBase64Url(bn) { + let hex = bn.toString(16) + if (hex.length % 2) hex = `0${hex}` + // eslint-disable-next-line no-restricted-globals + const buf = Buffer.from(hex, 'hex') + return buf.toString('base64') + .replace(/\+/g, '-') + .replace(/\//g, '_') + .replace(/=+$/, '') +} + +interface GoPrivateKey { + N: bigint + E: bigint + D: bigint + Primes: [bigint, bigint] + Precomputed?: { + Dp?: bigint + Dq?: bigint + Qinv?: bigint + } +} + +function goKeyToPEM(goKey: GoPrivateKey) { + const jwk = { + kty: 'RSA', + n: bigintToBase64Url(goKey.N), + e: bigintToBase64Url(goKey.E), + d: bigintToBase64Url(goKey.D), + p: bigintToBase64Url(goKey.Primes[0]), + q: bigintToBase64Url(goKey.Primes[1]), + dp: goKey.Precomputed?.Dp ? bigintToBase64Url(goKey.Precomputed.Dp) : undefined, + dq: goKey.Precomputed?.Dq ? bigintToBase64Url(goKey.Precomputed.Dq) : undefined, + qi: goKey.Precomputed?.Qinv ? bigintToBase64Url(goKey.Precomputed.Qinv) : undefined, + } + + // Remove undefined fields (dp/dq/qi could be missing if not precomputed) + Object.keys(jwk).forEach(k => jwk[k] === undefined && delete jwk[k]) + + const keyObject = crypto.createPrivateKey({ key: jwk, format: 'jwk' }) + return keyObject.export({ type: 'pkcs8', format: 'pem' }) +} + +// ! 
currently only supports gts privkey format, but should be easy enough to support other key formats
+const privKey = goKeyToPEM(JSON.parse(getEnv('AP_PRIVKEY'), ((key, value, ctx) => {
+  // go privkey json stores long numbers as just numbers so we need to convert them to bigints. requires node 20+ i think
+  if (typeof value === 'number') return BigInt(ctx.source)
+  return value
+}) as any))
+const actor = getEnv('AP_ACTOR')
+
+const url = new URL(process.argv[2] ?? (await question('url > ')))
+const body = (process.argv[3] ?? (await question('body (empty for GET) > '))).trim()
+
+const host = url.host
+const path = url.pathname
+const method = body ? 'POST' : 'GET'
+
+const date = new Date().toUTCString()
+const digest = body ? `SHA-256=${crypto.createHash('sha256').update(body).digest('base64')}` : undefined
+let toSign = `(request-target): ${method.toLowerCase()} ${path}
+host: ${host}
+date: ${date}`
+if (body) {
+  toSign += `\ndigest: ${digest}`
+}
+
+const signature = crypto.createSign('RSA-SHA256').update(toSign).sign(privKey, 'base64')
+
+const headers: Record<string, string> = {
+  'Date': date,
+  'Signature': `keyId="${actor}#main-key",headers="(request-target) host date${body ? ' digest' : ''}",algorithm="rsa-sha256",signature="${signature}"`,
+  'Content-Type': 'application/ld+json; profile="https://www.w3.org/ns/activitystreams"',
+  'Accept': 'application/ld+json; profile="https://www.w3.org/ns/activitystreams"',
+}
+if (body) {
+  headers.Digest = digest!
+} + +const res = await ffetch(`https://${host}${path}`, { + headers, + method, + body: body || undefined, + validateResponse: false, +}) +console.log(res.status) +const resText = await res.text() +if (resText[0] !== '{') { + console.error('bad response:', resText) + process.exit(1) +} + +const json = JSON.parse(resText) +console.dir(json, { depth: null }) diff --git a/scripts/misc/sourcemap-extractor.ts b/scripts/misc/sourcemap-extractor.ts new file mode 100644 index 0000000..9e35802 --- /dev/null +++ b/scripts/misc/sourcemap-extractor.ts @@ -0,0 +1,48 @@ +/** + * Simple tool for extracting source code from sourcemaps. + * Puts extracted files in ./_extracted/ + * Puts files from `../` (for example, webpack puts there node_modules) dir to _extracted/__/, + * and files from `./` to _extracted/ + * + * (c) tggdesu 2019. Licensed under GPLv3. + */ + +import * as fs from 'node:fs/promises' +import * as path from 'node:path' +import { question } from 'zx' + +interface SourceMap { + sources: string[] + sourcesContent: string[] +} + +const input = await question('URL or path to file > ') +const outDirDefault = `./assets/${input.split('/').pop()?.split('?')[0]}_extracted` +const outDir = await question(`Output directory [${outDirDefault}]> `) + +let content: SourceMap +if (/^https?:\/\//i.test(input)) { + console.log('[+] Fetching sourcemap') + const response = await fetch(input) + content = await response.json() as SourceMap +} else { + const data = await fs.readFile(input, 'utf-8') + content = JSON.parse(data) as SourceMap +} + +if (!content.sources || !content.sourcesContent || content.sources.length !== content.sourcesContent.length) { + console.error('[!] 
Sourcemap is either invalid or does not contain source code') + process.exit(1) +} + +for (let i = 0; i < content.sources.length; i++) { + const fname = content.sources[i] + .replace(/[\\/]\.[\\/]/g, '/_/') + .replace(/[\\/]\.\.[\\/]/g, '/__/') + .replace(/[:*?'"<>|&]/g, '') + process.stdout.write(`[~] ${fname}\r`) + await fs.mkdir(path.join(outDir, path.dirname(fname)), { recursive: true }) + await fs.writeFile(path.join(outDir, fname), content.sourcesContent[i]) +} + +console.log('\n[v] Finished!') diff --git a/utils/mpd.ts b/utils/mp4-streaming.ts similarity index 74% rename from utils/mpd.ts rename to utils/mp4-streaming.ts index 93b0fe3..b5ca997 100644 --- a/utils/mpd.ts +++ b/utils/mp4-streaming.ts @@ -5,8 +5,7 @@ import { writeWebStreamToFile } from './fs.ts' interface SimpleMpd { codecs: string - initUrl: string - segmentUrls: string[] + segments: string[] } export function parseSimpleMpd(xml: string): SimpleMpd { @@ -39,7 +38,7 @@ export function parseSimpleMpd(xml: string): SimpleMpd { const segments = timeline.find('S') assert(segments.length > 0, 'expected at least one segment') - const segmentUrls: string[] = [] + const segmentUrls: string[] = [initUrl] let segmentNum = Number(startNum) for (const segment of segments) { @@ -56,26 +55,51 @@ export function parseSimpleMpd(xml: string): SimpleMpd { return { codecs: representation.attr('codecs')!, - initUrl, - segmentUrls, + segments: segmentUrls, } } -export function concatMpdSegments(options: { - mpd: SimpleMpd +export function parseSimpleHls(m3u8: string): string[] { + let initUrl: string | undefined + const segments: string[] = [] + + const lines = m3u8.split('\n') + + for (let i = 0; i < lines.length; i++) { + const line = lines[i] + if (line.startsWith('#EXT-X-MAP:URI=')) { + initUrl = JSON.parse(line.slice('#EXT-X-MAP:URI='.length)) + } else if (line.startsWith('#EXTINF:')) { + const segmentUrl = lines[i + 1] + segments.push(segmentUrl) + i++ + } else if (line.startsWith('#EXT-X-ENDLIST')) { + break 
+    }
+  }
+
+  if (initUrl) {
+    segments.unshift(initUrl)
+  }
+
+  return segments
+}
+
+export function concatSegments(options: {
+  segments: string[]
   fetch: (url: string) => Promise<Uint8Array>
   poolSize?: number
 }): ReadableStream {
-  const { mpd, fetch, poolSize = 8 } = options
+  const { segments, fetch, poolSize = 8 } = options
 
-  let nextSegmentIdx = -1
-  let nextWorkerSegmentIdx = -1
+  let nextSegmentIdx = 0
+  let nextWorkerSegmentIdx = 0
   const nextSegmentCv = new ConditionVariable()
 
   const buffer: Record<number, Uint8Array> = {}
 
   const downloadSegment = async (idx = nextWorkerSegmentIdx++) => {
     // console.log('downloading segment %s', idx)
-    const url = idx === -1 ? mpd.initUrl : mpd.segmentUrls[idx]
+    const url = segments[idx]
     const chunk = await fetch(url)
 
     buffer[idx] = chunk
@@ -83,14 +107,14 @@
     nextSegmentCv.notify()
   }
 
-    if (nextWorkerSegmentIdx < mpd.segmentUrls.length) {
+    if (nextWorkerSegmentIdx < segments.length) {
       return downloadSegment()
     }
   }
 
   let error: unknown
   void Promise.all(Array.from({
-    length: Math.min(poolSize, mpd.segmentUrls.length),
+    length: Math.min(poolSize, segments.length),
   }, downloadSegment))
     .catch((e) => {
       error = e
@@ -113,7 +137,7 @@
         controller.enqueue(buf)
       }
 
-      if (nextSegmentIdx >= mpd.segmentUrls.length) {
+      if (nextSegmentIdx >= segments.length) {
         controller.close()
         return
       }