mirror of https://git.stupid.fish/teidesu/scripts.git
synced 2026-01-12 15:11:10 +11:00
chore: update public repo
This commit is contained in:
parent ef375d1188
commit 46cf487f04
5 changed files with 229 additions and 26 deletions
@@ -9,8 +9,9 @@ import { z } from 'zod'
 import { $, ProcessOutput, question } from 'zx'
 import { downloadFile, ffetch as ffetchBase } from '../../utils/fetch.ts'
 import { sanitizeFilename } from '../../utils/fs.ts'
+import { generateOpusImageBlob, pipeIntoProc } from '../../utils/media-metadata.ts'
 import { chunks, getEnv } from '../../utils/misc.ts'
-import { generateOpusImageBlob } from '../../utils/media-metadata.ts'
+import { concatSegments, parseSimpleHls } from '../../utils/mp4-streaming.ts'

 const ffetchApi = ffetchBase.extend({
     baseUrl: 'https://api-v2.soundcloud.com',
@@ -29,6 +30,9 @@ const ffetchApi = ffetchBase.extend({
 })
 const ffetchHtml = ffetchBase.extend({
     baseUrl: 'https://soundcloud.com',
+    retry: {
+        maxRetries: 3,
+    },
     headers: {
         Cookie: `oauth_token=${getEnv('SOUNDCLOUD_TOKEN')}`,
     },
@@ -156,7 +160,7 @@ async function downloadTrack(track: ScTrack, opts: {
         transcoding = t
     }

-    const { url: hlsUrl } = await ffetchApi(transcoding.url, {
+    const { url: mediaUrl } = await ffetchApi(transcoding.url, {
         query: {
             track_authorization: track.track_authorization,
         },
@@ -174,14 +178,39 @@ async function downloadTrack(track: ScTrack, opts: {
         url: z.string(),
     }))

-    let ext = transcoding.format.mime_type.match(/^audio\/(\w+)(;|$)/)![1]
-    if (ext === 'mp4') ext = 'm4a'
+    const ext = {
+        aac_256k: 'm4a',
+        aac_160k: 'm4a',
+        aac_1_0: 'm4a',
+        aac_hq: 'm4a',
+        abr_hq: 'm4a',
+        abr_sq: 'm4a',
+        mp3_0_0: 'mp3',
+        opus_0_0: 'ogg',
+    }[transcoding.preset]
+    if (!ext) {
+        throw new Error(`Unsupported transcoding preset: ${transcoding.preset}`)
+    }
     const filename = `${opts.destination}.${ext}`

+    let stream: ReadableStream | null = null
+    let ffmpegInput: string
+    if (transcoding.format.protocol === 'hls') {
+        const segments = parseSimpleHls(await ffetchHtml(mediaUrl).text())
+        stream = concatSegments({
+            segments,
+            poolSize: 4,
+            fetch: async url => new Uint8Array(await ffetchHtml(url).arrayBuffer()),
+        })
+        ffmpegInput = 'pipe:0'
+    } else {
+        ffmpegInput = mediaUrl
+    }
+
     const params: string[] = [
         '-y',
         '-i',
-        hlsUrl,
+        ffmpegInput,
     ]

     if (artworkBytes) {
@@ -228,13 +257,17 @@ async function downloadTrack(track: ScTrack, opts: {
         '-metadata',
         `artist=${track.user.username}`,
         '-metadata',
-        `comment=${track.description ?? ''}`,
+        `comment=${`${track.description ?? ''}\n\nripped from soundcloud (id: ${track.id}, url: ${track.permalink_url})`}`.trimStart(),
         filename,
     )

     while (true) {
         try {
-            await $`ffmpeg ${params}`.quiet(true)
+            const promise = $`ffmpeg ${params}`.quiet(true)
+            if (stream) {
+                await pipeIntoProc(promise, stream)
+            }
+            await promise
             break
         } catch (e) {
             if (!(e instanceof ProcessOutput)) {
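Note: the new HLS path hands the concatenated segment stream to ffmpeg via pipeIntoProc from utils/media-metadata.ts, which this diff does not show. A minimal sketch of what such a helper might look like — the name, signature, and backpressure handling here are assumptions, not taken from the repo:

import type { ProcessPromise } from 'zx'

// hypothetical sketch; the real pipeIntoProc lives in utils/media-metadata.ts
async function pipeIntoProcSketch(proc: ProcessPromise, stream: ReadableStream<Uint8Array>): Promise<void> {
    const stdin = proc.stdin // Node.js Writable wrapping the child's stdin
    const reader = stream.getReader()
    while (true) {
        const { done, value } = await reader.read()
        if (done) break
        // respect backpressure: pause until the pipe drains
        if (!stdin.write(value)) {
            await new Promise<void>(resolve => stdin.once('drain', () => resolve()))
        }
    }
    stdin.end()
}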
@@ -9,8 +9,7 @@ import { ffetch as ffetchBase } from '../../utils/fetch.ts'
 import { sanitizeFilename } from '../../utils/fs.ts'
 import { pipeIntoProc, runMetaflac, writeIntoProc } from '../../utils/media-metadata.ts'
 import { getEnv } from '../../utils/misc.ts'
-import { concatMpdSegments, parseSimpleMpd } from '../../utils/mpd.ts'
-import { createLibcurlFetch } from '../../utils/temkakit/libcurl.ts'
+import { concatSegments, parseSimpleMpd } from '../../utils/mp4-streaming.ts'

 const oauthResponse = await ffetchBase('https://auth.tidal.com/v1/oauth2/token', {
     form: {
@@ -192,8 +191,8 @@ async function downloadTrack(options: {
     ]

     const proc = $`ffmpeg ${params}`
-    await pipeIntoProc(proc, concatMpdSegments({
-        mpd: parseSimpleMpd(utf8.decoder.decode(manifest)),
+    await pipeIntoProc(proc, concatSegments({
+        segments: parseSimpleMpd(utf8.decoder.decode(manifest)).segments,
         fetch: async url => new Uint8Array(await ffetch(url).arrayBuffer()),
     }))
     await proc
@@ -408,7 +407,7 @@ if ((m = url.match(/\/track\/(\d+)/))) {

     await rm(tmpAlbumCoverPath)
 } else if ((m = url.match(/\/album\/(\d+)/))) {
-    await downloadAlbum(m[1])
+    await downloadAlbum(Number(m[1]))
 } else if ((m = url.match(/\/artist\/(\d+)/))) {
     const withAppearsOn = (await question('include appears on albums? (y/N) > ')).toLowerCase() === 'y'

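This call site tracks the refactor in utils/mp4-streaming.ts further down in the commit: parseSimpleMpd now folds the init segment URL into segments (as the first entry), so concatSegments only needs a flat, ordered URL list. A minimal sketch of the new contract, with the manifest string left as a placeholder:

import { concatSegments, parseSimpleMpd } from '../../utils/mp4-streaming.ts'

declare const manifestXml: string // placeholder: a DASH manifest with a SegmentTemplate timeline

// segments[0] is the init segment URL, followed by the media segments in order
const { segments } = parseSimpleMpd(manifestXml)

const stream: ReadableStream = concatSegments({
    segments,
    // assuming absolute segment URLs here; the real scripts go through their ffetch wrappers
    fetch: async url => new Uint8Array(await (await fetch(url)).arrayBuffer()),
})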
99	scripts/misc/ap-signed.ts	Normal file
@@ -0,0 +1,99 @@
+import crypto from 'node:crypto'
+import { question } from 'zx'
+import { ffetch } from '../../utils/fetch.ts'
+import { getEnv } from '../../utils/misc.ts'
+
+function bigintToBase64Url(bn: bigint) {
+    let hex = bn.toString(16)
+    if (hex.length % 2) hex = `0${hex}`
+    // eslint-disable-next-line no-restricted-globals
+    const buf = Buffer.from(hex, 'hex')
+    return buf.toString('base64')
+        .replace(/\+/g, '-')
+        .replace(/\//g, '_')
+        .replace(/=+$/, '')
+}
+
+interface GoPrivateKey {
+    N: bigint
+    E: bigint
+    D: bigint
+    Primes: [bigint, bigint]
+    Precomputed?: {
+        Dp?: bigint
+        Dq?: bigint
+        Qinv?: bigint
+    }
+}
+
+function goKeyToPEM(goKey: GoPrivateKey) {
+    const jwk = {
+        kty: 'RSA',
+        n: bigintToBase64Url(goKey.N),
+        e: bigintToBase64Url(goKey.E),
+        d: bigintToBase64Url(goKey.D),
+        p: bigintToBase64Url(goKey.Primes[0]),
+        q: bigintToBase64Url(goKey.Primes[1]),
+        dp: goKey.Precomputed?.Dp ? bigintToBase64Url(goKey.Precomputed.Dp) : undefined,
+        dq: goKey.Precomputed?.Dq ? bigintToBase64Url(goKey.Precomputed.Dq) : undefined,
+        qi: goKey.Precomputed?.Qinv ? bigintToBase64Url(goKey.Precomputed.Qinv) : undefined,
+    }
+
+    // Remove undefined fields (dp/dq/qi could be missing if not precomputed)
+    Object.keys(jwk).forEach(k => jwk[k] === undefined && delete jwk[k])
+
+    const keyObject = crypto.createPrivateKey({ key: jwk, format: 'jwk' })
+    return keyObject.export({ type: 'pkcs8', format: 'pem' })
+}
+
+// ! currently only supports gts privkey format, but should be easy enough to support other key formats
+const privKey = goKeyToPEM(JSON.parse(getEnv('AP_PRIVKEY'), ((key, value, ctx) => {
+    // go privkey json stores long numbers as just numbers so we need to convert them to bigints. requires node 20+ i think
+    if (typeof value === 'number') return BigInt(ctx.source)
+    return value
+}) as any))
+const actor = getEnv('AP_ACTOR')
+
+const url = new URL(process.argv[2] ?? (await question('url > ')))
+const body = (process.argv[3] ?? (await question('body (empty for GET) > '))).trim()
+
+const host = url.host
+const path = url.pathname
+const method = body ? 'POST' : 'GET'
+
+const date = new Date().toUTCString()
+const digest = body ? `SHA-256=${crypto.createHash('sha256').update(body).digest('base64')}` : undefined
+let toSign = `(request-target): ${method.toLowerCase()} ${path}
+host: ${host}
+date: ${date}`
+if (body) {
+    toSign += `\ndigest: ${digest}`
+}
+
+const signature = crypto.createSign('RSA-SHA256').update(toSign).sign(privKey, 'base64')
+
+const headers: Record<string, string> = {
+    'Date': date,
+    'Signature': `keyId="${actor}#main-key",headers="(request-target) host date${body ? ' digest' : ''}",algorithm="rsa-sha256",signature="${signature}"`,
+    'Content-Type': 'application/ld+json; profile="https://www.w3.org/ns/activitystreams"',
+    'Accept': 'application/ld+json; profile="https://www.w3.org/ns/activitystreams"',
+}
+if (body) {
+    headers.Digest = digest!
+}
+
+const res = await ffetch(`https://${host}${path}`, {
+    headers,
+    method,
+    body: body || undefined,
+    validateResponse: false,
+})
+console.log(res.status)
+const resText = await res.text()
+if (resText[0] !== '{') {
+    console.error('bad response:', resText)
+    process.exit(1)
+}
+
+const json = JSON.parse(resText)
+console.dir(json, { depth: null })
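ap-signed.ts builds a draft-cavage-style HTTP Signature: the signed string is the lowercased (request-target) pseudo-header plus host, date, and (for POST) digest, one per line. A hedged sketch of the matching verification step, assuming the actor's public key PEM has already been fetched from their profile (publicKeyPem and signatureB64 are hypothetical inputs):

import crypto from 'node:crypto'

declare const publicKeyPem: string // e.g. actor.publicKey.publicKeyPem from the remote profile
declare const signatureB64: string // the signature="…" value from the Signature header

// rebuilt exactly like `toSign` in the script above, e.g. for POST /inbox:
const signedString = `(request-target): post /inbox
host: example.social
date: Tue, 01 Jan 2026 00:00:00 GMT
digest: SHA-256=...`

const ok = crypto.createVerify('RSA-SHA256')
    .update(signedString)
    .verify(publicKeyPem, signatureB64, 'base64')
console.log('signature valid:', ok)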
48	scripts/misc/sourcemap-extractor.ts	Normal file
@@ -0,0 +1,48 @@
+/**
+ * Simple tool for extracting source code from sourcemaps.
+ * Puts extracted files in ./<sourcemap_file_name>_extracted/
+ * Puts files from `../` (for example, webpack puts node_modules there) into _extracted/__/,
+ * and files from `./` into _extracted/
+ *
+ * (c) tggdesu 2019. Licensed under GPLv3.
+ */
+
+import * as fs from 'node:fs/promises'
+import * as path from 'node:path'
+import { question } from 'zx'
+
+interface SourceMap {
+    sources: string[]
+    sourcesContent: string[]
+}
+
+const input = await question('URL or path to file > ')
+const outDirDefault = `./assets/${input.split('/').pop()?.split('?')[0]}_extracted`
+const outDir = (await question(`Output directory [${outDirDefault}]> `)) || outDirDefault
+
+let content: SourceMap
+if (/^https?:\/\//i.test(input)) {
+    console.log('[+] Fetching sourcemap')
+    const response = await fetch(input)
+    content = await response.json() as SourceMap
+} else {
+    const data = await fs.readFile(input, 'utf-8')
+    content = JSON.parse(data) as SourceMap
+}
+
+if (!content.sources || !content.sourcesContent || content.sources.length !== content.sourcesContent.length) {
+    console.error('[!] Sourcemap is either invalid or does not contain source code')
+    process.exit(1)
+}
+
+for (let i = 0; i < content.sources.length; i++) {
+    const fname = content.sources[i]
+        .replace(/[\\/]\.[\\/]/g, '/_/')
+        .replace(/[\\/]\.\.[\\/]/g, '/__/')
+        .replace(/[:*?'"<>|&]/g, '')
+    process.stdout.write(`[~] ${fname}\r`)
+    await fs.mkdir(path.join(outDir, path.dirname(fname)), { recursive: true })
+    await fs.writeFile(path.join(outDir, fname), content.sourcesContent[i])
+}
+
+console.log('\n[v] Finished!')
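For reference, the extractor only touches two fields of the sourcemap. A small hypothetical input showing how webpack-style paths map to output locations — the `/./` and `/../` segments become `/_/` and `/__/` respectively:

interface SourceMap {
    sources: string[]
    sourcesContent: string[]
}

// hypothetical sourcemap: the first file lands roughly under <outDir>/webpack/_/src/,
// the second under <outDir>/webpack/__/node_modules/ (the `:` is stripped by the last replace)
const example: SourceMap = {
    sources: ['webpack:///./src/app.ts', 'webpack:///../node_modules/lib/index.js'],
    sourcesContent: ['export const app = 1\n', 'module.exports = {}\n'],
}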
@@ -5,8 +5,7 @@ import { writeWebStreamToFile } from './fs.ts'

 interface SimpleMpd {
     codecs: string
-    initUrl: string
-    segmentUrls: string[]
+    segments: string[]
 }

 export function parseSimpleMpd(xml: string): SimpleMpd {
@@ -39,7 +38,7 @@ export function parseSimpleMpd(xml: string): SimpleMpd {
     const segments = timeline.find('S')
     assert(segments.length > 0, 'expected at least one segment')

-    const segmentUrls: string[] = []
+    const segmentUrls: string[] = [initUrl]

     let segmentNum = Number(startNum)
     for (const segment of segments) {
@@ -56,26 +55,51 @@ export function parseSimpleMpd(xml: string): SimpleMpd {

     return {
         codecs: representation.attr('codecs')!,
-        initUrl,
-        segmentUrls,
+        segments: segmentUrls,
     }
 }

-export function concatMpdSegments(options: {
-    mpd: SimpleMpd
+export function parseSimpleHls(m3u8: string): string[] {
+    let initUrl: string | undefined
+    const segments: string[] = []
+
+    const lines = m3u8.split('\n')
+
+    for (let i = 0; i < lines.length; i++) {
+        const line = lines[i]
+        if (line.startsWith('#EXT-X-MAP:URI=')) {
+            initUrl = JSON.parse(line.slice('#EXT-X-MAP:URI='.length))
+        } else if (line.startsWith('#EXTINF:')) {
+            const segmentUrl = lines[i + 1]
+            segments.push(segmentUrl)
+            i++
+        } else if (line.startsWith('#EXT-X-ENDLIST')) {
+            break
+        }
+    }
+
+    if (initUrl) {
+        segments.unshift(initUrl)
+    }
+
+    return segments
+}
+
+export function concatSegments(options: {
+    segments: string[]
     fetch: (url: string) => Promise<Uint8Array>
     poolSize?: number
 }): ReadableStream {
-    const { mpd, fetch, poolSize = 8 } = options
+    const { segments, fetch, poolSize = 8 } = options

-    let nextSegmentIdx = -1
-    let nextWorkerSegmentIdx = -1
+    let nextSegmentIdx = 0
+    let nextWorkerSegmentIdx = 0
     const nextSegmentCv = new ConditionVariable()
     const buffer: Record<number, Uint8Array> = {}

     const downloadSegment = async (idx = nextWorkerSegmentIdx++) => {
         // console.log('downloading segment %s', idx)
-        const url = idx === -1 ? mpd.initUrl : mpd.segmentUrls[idx]
+        const url = segments[idx]
         const chunk = await fetch(url)
         buffer[idx] = chunk

@@ -83,14 +107,14 @@ export function concatMpdSegments(options: {
             nextSegmentCv.notify()
         }

-        if (nextWorkerSegmentIdx < mpd.segmentUrls.length) {
+        if (nextWorkerSegmentIdx < segments.length) {
             return downloadSegment()
         }
     }

     let error: unknown
     void Promise.all(Array.from({
-        length: Math.min(poolSize, mpd.segmentUrls.length),
+        length: Math.min(poolSize, segments.length),
     }, downloadSegment))
         .catch((e) => {
             error = e
@@ -113,7 +137,7 @@ export function concatMpdSegments(options: {
                 controller.enqueue(buf)
             }

-            if (nextSegmentIdx >= mpd.segmentUrls.length) {
+            if (nextSegmentIdx >= segments.length) {
                 controller.close()
                 return
             }
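With the rename from concatMpdSegments to concatSegments, the pool-based downloader no longer knows anything about MPD or init segments: it just fetches the given URLs with up to poolSize concurrent workers and enqueues the results in list order. A minimal usage sketch with the new HLS parser — the playlist URL is a placeholder, and absolute segment URLs are assumed:

import { concatSegments, parseSimpleHls } from './mp4-streaming.ts'

const playlistUrl = 'https://cdn.example.com/audio/playlist.m3u8' // placeholder
const playlist = await (await fetch(playlistUrl)).text()

// the #EXT-X-MAP init segment, if present, comes first in the returned list
const segments = parseSimpleHls(playlist)

const stream = concatSegments({
    segments,
    poolSize: 4, // fetch up to 4 segments concurrently, emit them in playlist order
    fetch: async url => new Uint8Array(await (await fetch(url)).arrayBuffer()),
})
// `stream` can then be piped into ffmpeg via pipeIntoProc, as the soundcloud script does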