chore: update public repo

desu-bot 2025-08-14 09:21:11 +00:00
parent ccc5f98f34
commit 728699b3ec
No known key found for this signature in database
8 changed files with 1990 additions and 95 deletions

View file

@@ -11,20 +11,28 @@
"@types/better-sqlite3": "^7.6.12",
"@types/plist": "^3.0.5",
"@types/spinnies": "^0.5.3",
"babel-generator": "^6.26.1",
"babel-traverse": "^6.26.0",
"babylon": "^6.18.0",
"better-sqlite3": "^11.8.1",
"canvas": "^3.1.0",
"cheerio": "^1.0.0",
"egoroof-blowfish": "4.0.1",
"es-main": "^1.3.0",
"filesize": "^10.1.6",
"imapflow": "^1.0.193",
"json5": "^2.2.3",
"kuromoji": "^0.1.2",
"mailparser": "^3.7.4",
"nanoid": "^5.0.9",
"node-libcurl-ja3": "^5.0.3",
"patchright": "^1.52.5",
"plist": "^3.1.0",
"qrcode-terminal": "^0.12.0",
"spinnies": "^0.5.1",
"tough-cookie": "^5.0.0",
"tough-cookie-file-store": "^2.0.3",
"ts-morph": "^26.0.0",
"tsx": "^4.19.2",
"undici": "^7.2.0",
"wanakana": "^5.3.1"
@@ -33,6 +41,7 @@
"@antfu/eslint-config": "3.10.0",
"@fuman/fetch": "0.1.0",
"@fuman/utils": "0.0.14",
"@types/mailparser": "^3.4.6",
"@types/node": "22.10.0",
"domhandler": "^5.0.3",
"dotenv": "16.4.5",
@@ -43,6 +52,7 @@
"pnpm": {
"onlyBuiltDependencies": [
"better-sqlite3",
"node-libcurl-ja3",
"canvas"
]
}

pnpm-lock.yaml generated (1464 changes)

File diff suppressed because it is too large

View file

@@ -0,0 +1,22 @@
import { fetchSongsIter } from '../../../utils/navidrome.ts'
const IGNORE_PATHS = [
's3/Electronic/_Compilations/keygenjukebox/',
]
let count = 0
for await (const song of fetchSongsIter()) {
if (IGNORE_PATHS.some(path => song.path.startsWith(path))) {
continue
}
for (const field of ['mbzRecordingID', 'mbzReleaseTrackId', 'mbzAlbumId', 'mbzReleaseGroupId']) {
if (!song[field]) {
console.log('found missing %s: %s - %s (%s)', field, song.artist, song.title, song.path)
count++
break
}
}
}
console.log('found %d tracks without mbz ids', count)

View file

@@ -0,0 +1,21 @@
import { fetchSongsIter } from '../../../utils/navidrome.ts'
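// artist names that look multi-artist because of separator tokens ("&", "/") but are known single artists, so they are skipped below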
const WHITELIST_ARTISTS = new Set([
'betwixt & between',
'10th avenue cafe/tak',
'overmind and potatoes',
])
let count = 0
for await (const song of fetchSongsIter()) {
if (
(!song.participants?.artist || song.participants.artist.length === 1)
&& song.artist.match(/, | and | & |\/| x | feat\. /i)
&& !WHITELIST_ARTISTS.has(song.artist.toLowerCase())
) {
console.log('possible multiartist: %s - %s (%s)', song.artist, song.title, song.path)
count++
}
}
console.log('found %d possible multiartists', count)

View file

@@ -311,12 +311,93 @@ function getTrackName(track: GwTrack) {
return name
}
// todo
// async function resolveMusicbrainzIds(albumId: number) {
// const deezerUrl = `https://www.deezer.com/album/${albumId}`
// // try odesli api to fetch extra links
// const odesliRes = await ffetch('https://api.song.link/v1-alpha.1/links', {
// query: {
// url: deezerUrl,
// key: '71d7be8a-3a76-459b-b21e-8f0350374984',
// },
// }).parsedJson(z.object({
// linksByPlatform: z.record(z.string(), z.object({
// url: z.string(),
// })),
// })).catch(() => null)
// const urls = [deezerUrl]
// if (odesliRes) {
// for (const { url } of Object.values(odesliRes.linksByPlatform)) {
// urls.push(url)
// }
// }
// // try to resolve musicbrainz album id
// const mbRes1 = await ffetch('https://musicbrainz.org/ws/2/url', {
// query: {
// resource: urls,
// inc: 'release-rels',
// },
// }).parsedJson(z.object({
// urls: z.array(z.object({
// relations: z.array(z.any()),
// })),
// }))
// const uniqueMbIds = new Set<string>()
// for (const { relations } of mbRes1.urls) {
// for (const rel of relations) {
// if (rel['target-type'] !== 'release') continue
// uniqueMbIds.add(rel.release.id)
// }
// }
// if (uniqueMbIds.size === 0) return null
// const releaseMbId = uniqueMbIds.values().next().value
// // resolve the rest of the ids from the release
// const releaseRes = await ffetch(`https://musicbrainz.org/ws/2/release/${releaseMbId}`, {
// query: {
// inc: 'artists recordings',
// },
// }).parsedJson(z.object({
// 'artist-credit': z.array(z.object({
// artist: z.object({
// id: z.string(),
// }),
// })).optional(),
// 'media': z.array(z.object({
// id: z.string(),
// tracks: z.array(z.object({
// position: z.number(),
// title: z.string(),
// id: z.string(),
// recording: z.object({
// id: z.string(),
// }),
// })),
// })).optional(),
// }))
// return {
// release: releaseMbId,
// artists: releaseRes['artist-credit']?.map(it => it.artist.id) ?? [],
// tracks: releaseRes['media']?.[0]
// }
// }
async function downloadTrack(track: GwTrack, opts: {
destination: string
album?: GwAlbum
}) {
const albumUrl = `https://cdn-images.dzcdn.net/images/cover/${track.ALB_PICTURE}/1500x1500-000000-80-0-0.jpg`
- const [getUrlRes, albumAb, lyricsRes] = await Promise.all([
+ const [
+   getUrlRes,
+   albumAb,
+   lyricsRes,
+ ] = await Promise.all([
ffetch.post('https://media.deezer.com/v1/get_url', {
json: {
license_token: userData.USER.OPTIONS.license_token,
@@ -343,6 +424,8 @@ async function downloadTrack(track: GwTrack, opts: {
}),
])
// console.dir(getUrlRes, { depth: null })
const albumCoverPath = join(`assets/deezer-tmp-${track.SNG_ID}.jpg`)
await writeFile(albumCoverPath, new Uint8Array(albumAb))
@@ -487,6 +570,10 @@ async function downloadTrack(track: GwTrack, opts: {
params.push(`--set-tag=COPYRIGHT=${opts.album.COPYRIGHT}`)
}
if (lyricsLrc) {
params.push(`--set-tag=LYRICS=${lyricsLrc}`)
}
params.push(filename)
await $`metaflac ${params}`
@@ -605,9 +692,10 @@ async function downloadArtist(artistId: string) {
spinnies.succeed('collect', { text: `collected ${albums.length} albums with a total of ${trackCount} tracks` })
}
// fixme: singles should always contain artist name and be saved in artist root dir
// fixme: "featured" albums (i.e. when main artist of the album is not the one we're dling) should have album artist name in its dirname
// fixme: "featured" albums/tracks (i.e. when main artist of the album is not the one we're dling) should have album artist name in its dirname
// fixme: singles should be saved in artist root dir
// todo: automatic musicbrainz matching
// todo: automatic genius/musixmatch matching for lyrics if unavailable directly from deezer
await asyncPool(albums, async (alb) => {
const tracks = await gwLightApi({
@@ -784,6 +872,14 @@ if (url.match(/^(artist|album|track):(\d+)$/)) {
node: z.object({
id: z.string(),
title: z.string(),
contributors: z.object({
edges: z.array(z.object({
node: z.object({
id: z.string(),
name: z.string(),
}),
})),
}),
}),
})),
}),
@@ -801,7 +897,7 @@ if (url.match(/^(artist|album|track):(\d+)$/)) {
}
for (const [i, { node }] of iter.enumerate(searchResult.instantSearch.results.tracks.edges)) {
- console.log(`track:${node.id}: ${node.title}`)
+ console.log(`track:${node.id}: ${node.contributors.edges.map(it => it.node.name).join(', ')} - ${node.title}`)
}
const uri = await question('option > ')

View file

@@ -0,0 +1,365 @@
import type { Browser } from 'patchright'
import type { EmailVerificationProvider } from '../../utils/temkakit/email-verification.ts'
import { writeFile } from 'node:fs/promises'
import { faker } from '@faker-js/faker'
import { sleep } from '@fuman/utils'
import { load } from 'cheerio'
import { Cookie, CookieJar } from 'tough-cookie'
import { ffetch as ffetchBase } from '../../utils/fetch.ts'
import { AnymessageEmailVerificationProvider } from '../../utils/temkakit/anymessage.ts'
import { solveKasadaSalamoonder } from '../../utils/temkakit/kasada-solver.ts'
import { createLibcurlFetch } from '../../utils/temkakit/libcurl.ts'
// half broken, unfinished
function getProxy() {
// return {
// user: 'JaTjXK',
// pass: 'WYsU4C',
// host: '38.152.247.16',
// port: 9785,
// }
return {
user: '',
pass: '',
host: '127.0.0.1',
port: 7891,
}
}
function proxyToUrl(proxy: { user: string, pass: string, host: string, port: number }) {
return `http://${proxy.user}:${proxy.pass}@${proxy.host}:${proxy.port}`
}
const THREADS = 1
const ACCOUNTS_COUNT = 2
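// Kasada's p.js bootstrap script as served from Twitch's CDN; its URL is fed to the solver below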
const TWITCH_PJS = 'https://k.twitchcdn.net/149e9513-01fa-4fb0-aad4-566afd725d1b/2d206a39-8ed7-437e-a3be-862e0f06eea3/p.js'
async function twitchAutoreg(options: {
// browser: Browser
emailProvider: EmailVerificationProvider
log?: (format: string, ...args: any[]) => void
proxy?: string
}) {
const {
// browser,
proxy,
emailProvider,
log = (fmt, ...args) => console.log(fmt, ...args),
} = options
const jar = new CookieJar()
log('proxy', proxy)
const ffetch = ffetchBase.extend({
cookies: jar,
fetch: createLibcurlFetch({ proxy }),
})
log('fetching main page')
const mainPage = await ffetch('https://www.twitch.tv/').text()
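// pull the frontend build id embedded in the page markup; it is sent later as the Client-Version header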
const twilightBuildId = mainPage.match(/window.__twilightBuildID="([^"]+)"/)?.[1]
if (!twilightBuildId) {
throw new Error('failed to get twilightBuildId')
}
await jar.setCookie(new Cookie({
key: 'api_token',
value: `twilight.${faker.string.hexadecimal({ length: 32 })}`,
domain: 'twitch.tv',
path: '/',
secure: true,
sameSite: 'None',
hostOnly: false,
expires: new Date(Date.now() + 1000 * 60 * 60 * 24 * 365),
}), 'https://www.twitch.tv')
await jar.setCookie(new Cookie({
key: 'experiment_overrides',
value: encodeURIComponent(JSON.stringify({ experiments: {}, disabled: [] })),
domain: 'twitch.tv',
path: '/',
secure: true,
sameSite: 'None',
hostOnly: false,
expires: new Date(Date.now() + 1000 * 60 * 60 * 24 * 365),
}), 'https://www.twitch.tv')
const deviceId = faker.string.alphanumeric({ length: 32 })
const sessionId = faker.string.hexadecimal({ length: 16 })
log('generating integrity token')
// const kasadaSolver = await createKasadaSolver({
// pageUrl: 'https://www.twitch.tv/',
// scriptUrl: '',
// browser,
// beforePageLoad: async (page) => {
// await syncCookiesIntoBrowser(jar, page.context())
// },
// requests: [
// {
// protocol: 'https',
// method: 'POST',
// domain: 'gql.twitch.tv',
// path: '/integrity',
// },
// {
// protocol: 'https',
// method: 'POST',
// domain: 'passport.twitch.tv',
// path: '/integrity',
// },
// {
// protocol: 'https',
// method: 'POST',
// domain: 'passport.twitch.tv',
// path: '/protected_register',
// },
// {
// protocol: 'https',
// method: 'POST',
// domain: 'passport.twitch.tv',
// path: '/protected_login',
// },
// ],
// })
const commonHeaders: Record<string, string> = {
'X-Device-Id': deviceId,
'Client-Id': 'kimne78kx3ncx6brgo4mv6wki5h1ko',
'Client-Request-Id': faker.string.alphanumeric({ length: 32 }),
'Client-Session-Id': sessionId,
'Client-Version': twilightBuildId,
}
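// solve the Kasada anti-bot challenge via Salamoonder; the returned headers are merged into the /integrity request below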
const kasadaSolution = await solveKasadaSalamoonder({ pjs: TWITCH_PJS })
// const integrityToken = await kasadaSolver.request({
// url: 'https://gql.twitch.tv/integrity',
// method: 'POST',
// headers: commonHeaders,
// }) as { token: string }
const integrityToken = await ffetch('https://gql.twitch.tv/integrity', {
method: 'POST',
headers: {
...commonHeaders,
...kasadaSolution,
},
}).json() as { token: string }
const ffetchGql = ffetchBase.extend({
headers: {
'Sec-Fetch-Dest': 'empty',
'Sec-Fetch-Mode': 'cors',
'Sec-Fetch-Site': 'same-site',
...commonHeaders,
'Client-Integrity': integrityToken.token,
},
})
// await syncCookiesFromBrowser(kasadaSolver.page.context(), jar)
let username
while (true) {
username = faker.internet.username().toLowerCase().replace(/[^a-z0-9]/gi, '')
log('checking username', username)
const r = await ffetchGql.post('https://gql.twitch.tv/gql', {
json: [
{
operationName: 'UsernameValidator_User',
variables: { username },
extensions: {
persistedQuery: {
version: 1,
sha256Hash: 'fd1085cf8350e309b725cf8ca91cd90cac03909a3edeeedbd0872ac912f3d660',
},
},
},
],
}).json() as any
if (r[0].errors) {
throw new Error(`failed to check username:${JSON.stringify(r[0].errors)}`)
}
if (r[0].data.isUsernameAvailable) {
log('username is available: %s', username)
break
}
await sleep(1000)
}
log('ordering email')
const email = await emailProvider.getEmail()
log('got email: %s, registering', email)
const password = faker.internet.password({ length: 16, pattern: /[a-z0-9]/ })
const birthday = faker.date.birthdate({ min: 18, max: 25, mode: 'age' })
const registerBody: Record<string, any> = {
username,
password,
email,
birthday: {
day: birthday.getDate(),
month: birthday.getMonth() + 1,
year: birthday.getFullYear(),
isOver18: true,
},
email_marketing_opt_in: false,
client_id: 'kimne78kx3ncx6brgo4mv6wki5h1ko',
is_password_guide: 'nist',
}
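// first /protected_register attempt: error_code 5025 is treated as a failed integrity check and retried with a fresh
// Kasada solution, while error_code 2026 (email verification required) is the expected outcome at this stage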
for (let i = 0; i < 5; i++) {
// const r1 = await kasadaSolver.request({
// url: 'https://passport.twitch.tv/protected_register',
// method: 'POST',
// body: JSON.stringify(registerBody),
// headers: {
// 'Content-Type': 'text/plain;charset=UTF-8',
// 'Accept': '*/*',
// },
// credentials: 'include',
// }) as { error_code: number }
log('solving kasada')
const kasadaSolution = await solveKasadaSalamoonder({ pjs: TWITCH_PJS })
const r1 = await ffetch.post('https://passport.twitch.tv/protected_register', {
validateResponse: false,
json: registerBody,
headers: {
...kasadaSolution,
},
}).json() as { error_code: number }
log('r1', r1)
if (i < 4 && r1.error_code === 5025) {
log('integrity failed, retrying...')
continue
}
if (r1.error_code !== 2026) {
await emailProvider.dispose()
throw new Error(`failed to register: ${JSON.stringify(r1)}`)
}
break
}
log('waiting for code')
const message = await emailProvider.waitForMessage({ timeout: 90_000 })
const message$ = load(message)
const code = message$('center p[style^=background]').text() // what the fuck is this selector
// const code = await question('code: ')
if (!code.match(/^\d{6}$/)) {
log('❌ invalid code parsed: %s', code)
log(message)
await emailProvider.dispose()
throw new Error(`invalid code parsed:${code}`)
}
log('code: %s', code)
registerBody.email_verification_code = code
for (let i = 0; i < 5; i++) {
// const r2 = await kasadaSolver.request({
// url: 'https://passport.twitch.tv/protected_register',
// method: 'POST',
// body: JSON.stringify(registerBody),
// headers: {
// 'Content-Type': 'text/plain;charset=UTF-8',
// },
// credentials: 'include',
// }) as { error_code: number }
log('solving kasada')
const kasadaSolution = await solveKasadaSalamoonder({ pjs: TWITCH_PJS })
const r2 = await ffetch.post('https://passport.twitch.tv/protected_register', {
json: registerBody,
validateResponse: false,
headers: {
...kasadaSolution,
},
}).json() as { error_code: number }
if (i < 4 && r2.error_code === 5025) {
log('integrity failed, retrying...')
continue
}
if (r2.error_code) {
await emailProvider.dispose()
throw new Error(`❌ failed to register:${r2.error_code}`)
}
break
}
// await syncCookiesFromBrowser(kasadaSolver.page.context(), jar)
log('autoreg works!')
await emailProvider.dispose()
return {
username,
password,
email,
cookies: await jar.store.getAllCookies(),
}
}
let started = 0
let completed = 0
await Promise.all(Array.from({ length: THREADS }).map(async (_, idx) => {
const emailProvider = new AnymessageEmailVerificationProvider({
site: 'twitch.tv',
domain: 'hotmail.com',
})
let browser: Browser | null = null
while (true) {
if (started >= ACCOUNTS_COUNT) {
break
}
started++
const log = (fmt: string, ...args: any[]) => console.log(`[worker ${idx}] ${fmt}`, ...args)
try {
const proxy = getProxy()
// browser = await chromium.launch({
// channel: 'chrome',
// headless: false,
// env: {
// TZ: 'Europe/Amsterdam',
// },
// proxy: {
// server: `http://${proxy.host}:${proxy.port}`,
// username: proxy.user,
// password: proxy.pass,
// },
// })
const acct = await twitchAutoreg({
// browser,
proxy: proxyToUrl(proxy),
emailProvider,
log,
})
await writeFile('assets/twitch-accs.txt', `${JSON.stringify(acct)}\n`, { flag: 'a' })
completed++
log('completed: %d/%d', completed, ACCOUNTS_COUNT)
} catch (e) {
log('autoreg error: %s', e)
// await browser?.close()
browser = null
started--
}
}
}))

View file

@@ -1,87 +0,0 @@
import { sleep } from '@fuman/utils'
import { z } from 'zod'
import { ffetch } from './fetch.ts'
import { getEnv } from './misc.ts'
const CreateTaskResponse = z.object({
errorId: z.number(),
errorCode: z.string().optional().nullable(),
taskId: z.number(),
})
const GetTaskResultResponse = z.object({
errorId: z.number(),
errorCode: z.string().optional().nullable(),
status: z.enum(['ready', 'processing']),
solution: z.unknown().optional(),
})
export async function solveCaptcha(task: unknown) {
const res = await ffetch.post('https://api.capmonster.cloud/createTask', {
json: {
clientKey: getEnv('CAPMONSTER_API_TOKEN'),
task,
},
}).parsedJson(CreateTaskResponse)
if (res.errorId) {
throw new Error(`createTask error ${res.errorId}: ${res.errorCode}`)
}
const taskId = res.taskId
await sleep(5_000)
let requestCount = 0
while (true) {
requestCount += 1
if (requestCount > 100) {
// "Limit: 120 requests per task. If the limit is exceeded, the user's account may be temporarily locked."
// just to be safe
throw new Error('captcha request count exceeded')
}
const res = await ffetch.post('https://api.capmonster.cloud/getTaskResult', {
json: {
clientKey: getEnv('CAPMONSTER_API_TOKEN'),
taskId,
},
}).parsedJson(GetTaskResultResponse)
if (res.errorId) {
throw new Error(`getTaskResult error ${res.errorId}: ${res.errorCode}`)
}
if (res.status === 'ready') {
return res.solution
}
await sleep(2_000)
}
}
export async function solveRecaptcha(params?: {
url: string
siteKey: string
s?: string
userAgent?: string
cookies?: string
isInvisible?: boolean
}) {
const res = await solveCaptcha({
type: 'RecaptchaV2TaskProxyless',
websiteURL: params?.url,
websiteKey: params?.siteKey,
recaptchaDataSValue: params?.s,
userAgent: params?.userAgent,
cookies: params?.cookies,
isInvisible: params?.isInvisible,
})
if (typeof res !== 'object' || !res || !('gRecaptchaResponse' in res) || typeof res.gRecaptchaResponse !== 'string') {
throw new Error('invalid recaptcha response')
}
return res.gRecaptchaResponse
}

View file

@@ -33,6 +33,16 @@ export const NavidromeSong = z.object({
libraryPath: z.string(),
duration: z.number(),
size: z.number(),
participants: z.object({
artist: z.object({
id: z.string(),
name: z.string(),
}).array().optional(),
}).optional(),
mbzRecordingID: z.string().optional(),
mbzReleaseTrackId: z.string().optional(),
mbzAlbumId: z.string().optional(),
mbzReleaseGroupId: z.string().optional(),
})
export type NavidromeSong = z.infer<typeof NavidromeSong>
@@ -43,7 +53,7 @@ export async function fetchSongs(offset: number, pageSize: number) {
_start: offset,
_end: offset + pageSize,
_order: 'ASC',
- _sort: 'title',
+ _sort: 'path',
},
}).parsedJson(z.array(NavidromeSong))
}
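
The two new scripts above consume fetchSongsIter(), which is not part of this diff. A minimal sketch of how such an iterator could wrap the fetchSongs() helper shown here, assuming it simply pages through the library until an empty page comes back (the page size of 500 is an arbitrary choice):

// hypothetical sketch, not part of this commit: lazily page through every song via fetchSongs
export async function* fetchSongsIter(pageSize = 500): AsyncGenerator<NavidromeSong> {
  let offset = 0
  while (true) {
    const page = await fetchSongs(offset, pageSize)
    if (page.length === 0) return
    yield* page
    offset += page.length
  }
}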