mirror of https://git.stupid.fish/teidesu/scripts.git, synced 2026-01-12 15:11:10 +11:00
chore: update public repo
commit 46cf487f04 (parent ef375d1188)
5 changed files with 229 additions and 26 deletions
@@ -9,8 +9,9 @@ import { z } from 'zod'
import { $, ProcessOutput, question } from 'zx'
import { downloadFile, ffetch as ffetchBase } from '../../utils/fetch.ts'
import { sanitizeFilename } from '../../utils/fs.ts'
+import { generateOpusImageBlob, pipeIntoProc } from '../../utils/media-metadata.ts'
import { chunks, getEnv } from '../../utils/misc.ts'
-import { generateOpusImageBlob } from '../../utils/media-metadata.ts'
+import { concatSegments, parseSimpleHls } from '../../utils/mp4-streaming.ts'

const ffetchApi = ffetchBase.extend({
    baseUrl: 'https://api-v2.soundcloud.com',
@@ -29,6 +30,9 @@ const ffetchApi = ffetchBase.extend({
})
const ffetchHtml = ffetchBase.extend({
    baseUrl: 'https://soundcloud.com',
+    retry: {
+        maxRetries: 3,
+    },
    headers: {
        Cookie: `oauth_token=${getEnv('SOUNDCLOUD_TOKEN')}`,
    },
@@ -156,7 +160,7 @@ async function downloadTrack(track: ScTrack, opts: {
        transcoding = t
    }

-    const { url: hlsUrl } = await ffetchApi(transcoding.url, {
+    const { url: mediaUrl } = await ffetchApi(transcoding.url, {
        query: {
            track_authorization: track.track_authorization,
        },
@@ -174,14 +178,39 @@ async function downloadTrack(track: ScTrack, opts: {
        url: z.string(),
    }))

-    let ext = transcoding.format.mime_type.match(/^audio\/(\w+)(;|$)/)![1]
-    if (ext === 'mp4') ext = 'm4a'
+    const ext = {
+        aac_256k: 'm4a',
+        aac_160k: 'm4a',
+        aac_1_0: 'm4a',
+        aac_hq: 'm4a',
+        abr_hq: 'm4a',
+        abr_sq: 'm4a',
+        mp3_0_0: 'mp3',
+        opus_0_0: 'ogg',
+    }[transcoding.preset]
+    if (!ext) {
+        throw new Error(`Unsupported transcoding preset: ${transcoding.preset}`)
+    }
    const filename = `${opts.destination}.${ext}`

+    let stream: ReadableStream | null = null
+    let ffmpegInput: string
+    if (transcoding.format.protocol === 'hls') {
+        const segments = parseSimpleHls(await ffetchHtml(mediaUrl).text())
+        stream = concatSegments({
+            segments,
+            poolSize: 4,
+            fetch: async url => new Uint8Array(await ffetchHtml(url).arrayBuffer()),
+        })
+        ffmpegInput = 'pipe:0'
+    } else {
+        ffmpegInput = mediaUrl
+    }

    const params: string[] = [
        '-y',
        '-i',
-        hlsUrl,
+        ffmpegInput,
    ]

    if (artworkBytes) {
@@ -228,13 +257,17 @@ async function downloadTrack(track: ScTrack, opts: {
        '-metadata',
        `artist=${track.user.username}`,
        '-metadata',
-        `comment=${track.description ?? ''}`,
+        `comment=${`${track.description ?? ''}\n\nripped from soundcloud (id: ${track.id}, url: ${track.permalink_url})`}`.trimStart(),
        filename,
    )

    while (true) {
        try {
-            await $`ffmpeg ${params}`.quiet(true)
+            const promise = $`ffmpeg ${params}`.quiet(true)
+            if (stream) {
+                await pipeIntoProc(promise, stream)
+            }
+            await promise
            break
        } catch (e) {
            if (!(e instanceof ProcessOutput)) {
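The ../../utils/mp4-streaming.ts helpers introduced by this change are not part of the diff. As a rough sketch of what parseSimpleHls plausibly does — only the name and the call shape come from the diff, the body below is an assumption, not the repo's actual implementation — it extracts segment URLs from a flat media playlist:

// Hypothetical sketch: pull segment URLs out of a simple (non-master) m3u8 playlist.
// Assumes every non-blank line that is not a '#' directive is an absolute segment URL.
export function parseSimpleHls(playlist: string): string[] {
    return playlist
        .split('\n')
        .map(line => line.trim())
        .filter(line => line !== '' && !line.startsWith('#'))
}

The SoundCloud script above then hands those URLs to concatSegments together with a per-segment fetch callback, and pipes the resulting stream into ffmpeg via pipe:0.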
@@ -9,8 +9,7 @@ import { ffetch as ffetchBase } from '../../utils/fetch.ts'
import { sanitizeFilename } from '../../utils/fs.ts'
import { pipeIntoProc, runMetaflac, writeIntoProc } from '../../utils/media-metadata.ts'
import { getEnv } from '../../utils/misc.ts'
-import { concatMpdSegments, parseSimpleMpd } from '../../utils/mpd.ts'
-import { createLibcurlFetch } from '../../utils/temkakit/libcurl.ts'
+import { concatSegments, parseSimpleMpd } from '../../utils/mp4-streaming.ts'

const oauthResponse = await ffetchBase('https://auth.tidal.com/v1/oauth2/token', {
    form: {
@@ -192,8 +191,8 @@ async function downloadTrack(options: {
    ]

    const proc = $`ffmpeg ${params}`
-    await pipeIntoProc(proc, concatMpdSegments({
-        mpd: parseSimpleMpd(utf8.decoder.decode(manifest)),
+    await pipeIntoProc(proc, concatSegments({
+        segments: parseSimpleMpd(utf8.decoder.decode(manifest)).segments,
        fetch: async url => new Uint8Array(await ffetch(url).arrayBuffer()),
    }))
    await proc
@@ -408,7 +407,7 @@ if ((m = url.match(/\/track\/(\d+)/))) {

    await rm(tmpAlbumCoverPath)
} else if ((m = url.match(/\/album\/(\d+)/))) {
-    await downloadAlbum(m[1])
+    await downloadAlbum(Number(m[1]))
} else if ((m = url.match(/\/artist\/(\d+)/))) {
    const withAppearsOn = (await question('include appears on albums? (y/N) > ')).toLowerCase() === 'y'
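concatSegments, now shared by the SoundCloud and Tidal scripts, is likewise outside this diff. A minimal sketch of the idea, assuming the option names seen at the call sites (segments, poolSize, fetch) and a pull-based ReadableStream; an illustration, not the repo's implementation:

// Hypothetical pooled segment concatenator: keep up to `poolSize` downloads
// in flight and emit the results strictly in playlist order.
interface ConcatSegmentsOptions {
    segments: string[]
    poolSize?: number
    fetch: (url: string) => Promise<Uint8Array>
}

export function concatSegments(opts: ConcatSegmentsOptions): ReadableStream<Uint8Array> {
    const { segments, poolSize = 4, fetch } = opts
    const pending: Promise<Uint8Array>[] = []
    let next = 0

    // start new downloads until the pool is full
    const fill = () => {
        while (next < segments.length && pending.length < poolSize) {
            pending.push(fetch(segments[next++]))
        }
    }

    return new ReadableStream<Uint8Array>({
        async pull(controller) {
            fill()
            const head = pending.shift()
            if (!head) {
                controller.close()
                return
            }
            controller.enqueue(await head)
        },
    })
}

Pull-based backpressure means at most poolSize segments are fetched ahead of whatever ffmpeg has consumed so far, which is what poolSize: 4 at the SoundCloud call site suggests.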
scripts/misc/ap-signed.ts (new file, 99 lines)
@@ -0,0 +1,99 @@
import crypto from 'node:crypto'
import { question } from 'zx'

import { ffetch } from '../../utils/fetch.ts'
import { getEnv } from '../../utils/misc.ts'

function bigintToBase64Url(bn) {
    let hex = bn.toString(16)
    if (hex.length % 2) hex = `0${hex}`
    // eslint-disable-next-line no-restricted-globals
    const buf = Buffer.from(hex, 'hex')
    return buf.toString('base64')
        .replace(/\+/g, '-')
        .replace(/\//g, '_')
        .replace(/=+$/, '')
}

interface GoPrivateKey {
    N: bigint
    E: bigint
    D: bigint
    Primes: [bigint, bigint]
    Precomputed?: {
        Dp?: bigint
        Dq?: bigint
        Qinv?: bigint
    }
}

function goKeyToPEM(goKey: GoPrivateKey) {
    const jwk = {
        kty: 'RSA',
        n: bigintToBase64Url(goKey.N),
        e: bigintToBase64Url(goKey.E),
        d: bigintToBase64Url(goKey.D),
        p: bigintToBase64Url(goKey.Primes[0]),
        q: bigintToBase64Url(goKey.Primes[1]),
        dp: goKey.Precomputed?.Dp ? bigintToBase64Url(goKey.Precomputed.Dp) : undefined,
        dq: goKey.Precomputed?.Dq ? bigintToBase64Url(goKey.Precomputed.Dq) : undefined,
        qi: goKey.Precomputed?.Qinv ? bigintToBase64Url(goKey.Precomputed.Qinv) : undefined,
    }

    // Remove undefined fields (dp/dq/qi could be missing if not precomputed)
    Object.keys(jwk).forEach(k => jwk[k] === undefined && delete jwk[k])

    const keyObject = crypto.createPrivateKey({ key: jwk, format: 'jwk' })
    return keyObject.export({ type: 'pkcs8', format: 'pem' })
}

// ! currently only supports gts privkey format, but should be easy enough to support other key formats
const privKey = goKeyToPEM(JSON.parse(getEnv('AP_PRIVKEY'), ((key, value, ctx) => {
    // go privkey json stores long numbers as just numbers so we need to convert them to bigints. requires node 20+ i think
    if (typeof value === 'number') return BigInt(ctx.source)
    return value
}) as any))
const actor = getEnv('AP_ACTOR')

const url = new URL(process.argv[2] ?? (await question('url > ')))
const body = (process.argv[3] ?? (await question('body (empty for GET) > '))).trim()

const host = url.host
const path = url.pathname
const method = body ? 'POST' : 'GET'

const date = new Date().toUTCString()
const digest = body ? `SHA-256=${crypto.createHash('sha256').update(body).digest('base64')}` : undefined
let toSign = `(request-target): ${method.toLowerCase()} ${path}
host: ${host}
date: ${date}`
if (body) {
    toSign += `\ndigest: ${digest}`
}

const signature = crypto.createSign('RSA-SHA256').update(toSign).sign(privKey, 'base64')

const headers: Record<string, string> = {
    'Date': date,
    'Signature': `keyId="${actor}#main-key",headers="(request-target) host date${body ? ' digest' : ''}",algorithm="rsa-sha256",signature="${signature}"`,
    'Content-Type': 'application/ld+json; profile="https://www.w3.org/ns/activitystreams"',
    'Accept': 'application/ld+json; profile="https://www.w3.org/ns/activitystreams"',
}
if (body) {
    headers.Digest = digest!
}

const res = await ffetch(`https://${host}${path}`, {
    headers,
    method,
    body: body || undefined,
    validateResponse: false,
})
console.log(res.status)
const resText = await res.text()
if (resText[0] !== '{') {
    console.error('bad response:', resText)
    process.exit(1)
}

const json = JSON.parse(resText)
console.dir(json, { depth: null })
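ap-signed.ts signs requests with the draft-cavage HTTP Signatures profile that ActivityPub servers generally expect: the signing string covers (request-target), host, date and, for POST bodies, a SHA-256 Digest header. For context, a hedged sketch of the check a receiving server performs with the actor's public key; this is illustration only, not part of the script, and header parsing is simplified:

import crypto from 'node:crypto'

// Hypothetical verification counterpart: rebuild the signing string from the
// received request and verify it against the Signature header's value.
function verifyHttpSignature(req: {
    method: string
    path: string
    headers: Record<string, string> // lowercase header names; must include 'host' and 'date'
    signedHeaders: string[] // e.g. ['(request-target)', 'host', 'date', 'digest']
    signatureB64: string
    publicKeyPem: string
}): boolean {
    const signingString = req.signedHeaders
        .map(name => name === '(request-target)'
            ? `(request-target): ${req.method.toLowerCase()} ${req.path}`
            : `${name}: ${req.headers[name]}`)
        .join('\n')

    return crypto
        .createVerify('RSA-SHA256')
        .update(signingString)
        .verify(req.publicKeyPem, req.signatureB64, 'base64')
}

A real server additionally resolves the keyId from the Signature header to fetch publicKeyPem from the actor document, and usually rejects requests whose Date header is too old.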
scripts/misc/sourcemap-extractor.ts (new file, 48 lines)
@@ -0,0 +1,48 @@
/**
 * Simple tool for extracting source code from sourcemaps.
 * Puts extracted files in ./<sourcemap_file_name>_extracted/
 * Puts files from `../` (for example, webpack puts there node_modules) dir to _extracted/__/,
 * and files from `./` to _extracted/
 *
 * (c) tggdesu 2019. Licensed under GPLv3.
 */

import * as fs from 'node:fs/promises'
import * as path from 'node:path'
import { question } from 'zx'

interface SourceMap {
    sources: string[]
    sourcesContent: string[]
}

const input = await question('URL or path to file > ')
const outDirDefault = `./assets/${input.split('/').pop()?.split('?')[0]}_extracted`
const outDir = await question(`Output directory [${outDirDefault}]> `)

let content: SourceMap
if (/^https?:\/\//i.test(input)) {
    console.log('[+] Fetching sourcemap')
    const response = await fetch(input)
    content = await response.json() as SourceMap
} else {
    const data = await fs.readFile(input, 'utf-8')
    content = JSON.parse(data) as SourceMap
}

if (!content.sources || !content.sourcesContent || content.sources.length !== content.sourcesContent.length) {
    console.error('[!] Sourcemap is either invalid or does not contain source code')
    process.exit(1)
}

for (let i = 0; i < content.sources.length; i++) {
    const fname = content.sources[i]
        .replace(/[\\/]\.[\\/]/g, '/_/')
        .replace(/[\\/]\.\.[\\/]/g, '/__/')
        .replace(/[:*?'"<>|&]/g, '')
    process.stdout.write(`[~] ${fname}\r`)
    await fs.mkdir(path.join(outDir, path.dirname(fname)), { recursive: true })
    await fs.writeFile(path.join(outDir, fname), content.sourcesContent[i])
}

console.log('\n[v] Finished!')
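The `../` to `__/` mapping described in the doc comment is easiest to see on concrete paths. A hedged demo of the same three replace() calls the loop applies, using typical webpack-style source names as hypothetical input (path.join later collapses the duplicate slashes):

// Hypothetical demo of the path rewriting done for each sourcemap entry.
const rewrite = (source: string) => source
    .replace(/[\\/]\.[\\/]/g, '/_/')
    .replace(/[\\/]\.\.[\\/]/g, '/__/')
    .replace(/[:*?'"<>|&]/g, '')

console.log(rewrite('webpack:///./src/app.ts'))
// -> 'webpack///_/src/app.ts', written to <outDir>/webpack/_/src/app.ts
console.log(rewrite('webpack:///../node_modules/ms/index.js'))
// -> 'webpack///__/node_modules/ms/index.js', written to <outDir>/webpack/__/node_modules/ms/index.js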