Mirror of https://git.stupid.fish/teidesu/scripts.git, synced 2025-07-28 02:32:11 +10:00
chore: update public repo
commit e7c9507247 · parent e0109980c0
25 changed files with 5364 additions and 0 deletions
utils/captcha.ts · new file · 87 lines
@@ -0,0 +1,87 @@
import { sleep } from '@fuman/utils'
import { z } from 'zod'
import { ffetch } from './fetch.ts'
import { getEnv } from './misc.ts'

const CreateTaskResponse = z.object({
    errorId: z.number(),
    errorCode: z.string().optional().nullable(),
    taskId: z.number(),
})

const GetTaskResultResponse = z.object({
    errorId: z.number(),
    errorCode: z.string().optional().nullable(),
    status: z.enum(['ready', 'processing']),
    solution: z.unknown().optional(),
})

export async function solveCaptcha(task: unknown) {
    const res = await ffetch.post('https://api.capmonster.cloud/createTask', {
        json: {
            clientKey: getEnv('CAPMONSTER_API_TOKEN'),
            task,
        },
    }).parsedJson(CreateTaskResponse)

    if (res.errorId) {
        throw new Error(`createTask error ${res.errorId}: ${res.errorCode}`)
    }

    const taskId = res.taskId

    await sleep(5_000)

    let requestCount = 0

    while (true) {
        requestCount += 1
        if (requestCount > 100) {
            // "Limit: 120 requests per task. If the limit is exceeded, the user's account may be temporarily locked."
            // just to be safe
            throw new Error('captcha request count exceeded')
        }

        const res = await ffetch.post('https://api.capmonster.cloud/getTaskResult', {
            json: {
                clientKey: getEnv('CAPMONSTER_API_TOKEN'),
                taskId,
            },
        }).parsedJson(GetTaskResultResponse)

        if (res.errorId) {
            throw new Error(`getTaskResult error ${res.errorId}: ${res.errorCode}`)
        }

        if (res.status === 'ready') {
            return res.solution
        }

        await sleep(2_000)
    }
}

export async function solveRecaptcha(params: {
    url: string
    siteKey: string
    s?: string
    userAgent?: string
    cookies?: string
    isInvisible?: boolean
}) {
    const res = await solveCaptcha({
        type: 'RecaptchaV2TaskProxyless',
        websiteURL: params.url,
        websiteKey: params.siteKey,
        recaptchaDataSValue: params.s,
        userAgent: params.userAgent,
        cookies: params.cookies,
        isInvisible: params.isInvisible,
    })

    if (typeof res !== 'object' || !res || !('gRecaptchaResponse' in res) || typeof res.gRecaptchaResponse !== 'string') {
        throw new Error('invalid recaptcha response')
    }

    return res.gRecaptchaResponse
}
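Usage sketch (not part of this commit; the page URL and site key below are placeholders):

// hypothetical example: obtain a g-recaptcha-response token for a form
import { solveRecaptcha } from './utils/captcha.ts'

const token = await solveRecaptcha({
    url: 'https://example.com/login', // placeholder page URL
    siteKey: '6LcPLACEHOLDER', // placeholder site key
})
// `token` is the g-recaptcha-response string to submit alongside the form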
utils/currency.ts · new file · 113 lines
@@ -0,0 +1,113 @@
import { asyncPool } from '@fuman/utils'

import { z } from 'zod'
import { ffetch } from './fetch.ts'
import { getEnv } from './misc.ts'

// token management
const TOKENS = getEnv('OXR_TOKENS').split(',')
// api token => requests remaining
const usageAvailable = new Map<string, number>()
function getToken() {
    // find the first token that still has requests remaining
    const token = TOKENS.find(t => usageAvailable.get(t)! > 0)
    if (!token) throw new Error('no tokens available')

    // consume 1 request
    usageAvailable.set(token, usageAvailable.get(token)! - 1)

    return token
}

// base => other => value
// NB: ideally we should have expiration and persistence on this
const data = new Map<string, Record<string, number>>()

async function fetchMissingPairs(list: { from: string, to: string }[]) {
    const missing = list.filter(c => !data.has(c.from) && !data.has(c.to) && c.from !== c.to)
    if (missing.length === 0) return

    const basesToFetch = new Set<string>()

    for (const { from, to } of missing) {
        if (!basesToFetch.has(from) && !basesToFetch.has(to)) {
            basesToFetch.add(from)
        }
    }

    if (!usageAvailable.size) {
        // NB: ideally we should lock here for a production-ready implementation

        // fetch usage for all tokens
        await asyncPool(TOKENS, async (token) => {
            const res = await ffetch('https://openexchangerates.org/api/usage.json', {
                query: {
                    app_id: token,
                },
            }).parsedJson(z.object({
                status: z.literal(200),
                data: z.object({
                    app_id: z.string(),
                    status: z.literal('active'),
                    usage: z.object({
                        requests_remaining: z.number(),
                    }),
                }),
            }))

            usageAvailable.set(token, res.data.usage.requests_remaining)
        }, { onError: () => 'ignore' })

        if (!usageAvailable.size) {
            throw new Error('failed to fetch usage, are all tokens dead?')
        }
    }

    // console.log('will fetch bases:', [...basesToFetch])

    await asyncPool(basesToFetch, async (base) => {
        const res = await ffetch('https://openexchangerates.org/api/latest.json', {
            query: {
                app_id: getToken(),
            },
        }).parsedJson(z.object({
            rates: z.record(z.string(), z.number()),
        }))

        data.set(base, res.rates)
    })
}

export async function convertCurrenciesBatch(list: { from: string, to: string, amount: number }[]) {
    await fetchMissingPairs(list)
    const ret: { from: string, to: string, amount: number, converted: number }[] = []

    for (const { from, to, amount } of list) {
        let result: number

        if (from === to) {
            result = amount
        } else if (data.has(from)) {
            const rate = data.get(from)![to]!
            if (!rate) throw new Error(`rate unavailable: ${from} -> ${to}`)
            result = amount * rate
            // console.log('converted from', from, 'to', to, 'amount', amount, 'result', result, 'rate', rate)
        } else if (data.has(to)) {
            const rate = data.get(to)![from]!
            if (!rate) throw new Error(`rate unavailable: ${from} -> ${to}`)
            result = amount / rate
            // console.log('converted rev from', from, 'to', to, 'amount', amount, 'result', result, 'rate', rate)
        } else {
            throw new Error(`rate unavailable: ${from} -> ${to}`)
        }

        ret.push({
            from,
            to,
            amount,
            converted: result,
        })
    }

    return ret
}
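Usage sketch (not part of this commit; requires OXR_TOKENS to be set to one or more openexchangerates.org app ids):

// hypothetical example: convert a couple of amounts in one batch
import { convertCurrenciesBatch } from './utils/currency.ts'

const results = await convertCurrenciesBatch([
    { from: 'USD', to: 'EUR', amount: 100 },
    { from: 'JPY', to: 'USD', amount: 5000 },
])
// each entry comes back with a `converted` field, e.g. results[0].converted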
utils/fetch.ts · new file · 37 lines
@@ -0,0 +1,37 @@
import { createWriteStream } from 'node:fs'

import { type FfetchAddon, ffetchAddons, ffetchBase, type FfetchResultInternals } from '@fuman/fetch'
import { toughCookieAddon } from '@fuman/fetch/tough'
import { ffetchZodAdapter } from '@fuman/fetch/zod'
import { webReadableToFuman, write } from '@fuman/io'
import { nodeWritableToFuman } from '@fuman/node'
import { type CheerioAPI, load } from 'cheerio'

const cheerioAddon: FfetchAddon<object, { cheerio: () => Promise<CheerioAPI> }> = {
    response: {
        async cheerio(this: FfetchResultInternals<object>) {
            this._headers ??= {}
            this._headers.Accept ??= 'text/html; charset=utf-8'
            return load(await this.text())
        },
    },
}

export const ffetch = ffetchBase.extend({
    addons: [
        ffetchAddons.parser(ffetchZodAdapter()),
        cheerioAddon,
        toughCookieAddon(),
    ],
})

export async function downloadStream(stream: ReadableStream, path: string) {
    const file = nodeWritableToFuman(createWriteStream(path))
    await write.pipe(file, webReadableToFuman(stream))
    file.close()
}

export async function downloadFile(url: string, path: string, extra?: Parameters<typeof ffetch>[1]) {
    const stream = await ffetch(url, extra).stream()
    await downloadStream(stream, path)
}
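Usage sketch (not part of this commit; URLs are placeholders):

// hypothetical example: scrape a page title, validate a JSON response, download a file
import { z } from 'zod'
import { downloadFile, ffetch } from './utils/fetch.ts'

const $ = await ffetch('https://example.com').cheerio()
console.log($('title').text())

const data = await ffetch('https://example.com/api.json')
    .parsedJson(z.object({ ok: z.boolean() }))

await downloadFile('https://example.com/file.bin', '/tmp/file.bin')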
utils/fs.ts · new file · 19 lines
@@ -0,0 +1,19 @@
import * as fsp from 'node:fs/promises'

export async function fileExists(path: string): Promise<boolean> {
    try {
        const stat = await fsp.stat(path)
        return stat.isFile()
    } catch {
        return false
    }
}

export async function directoryExists(path: string): Promise<boolean> {
    try {
        const stat = await fsp.stat(path)
        return stat.isDirectory()
    } catch {
        return false
    }
}
utils/misc.ts · new file · 10 lines
@@ -0,0 +1,10 @@
import 'dotenv/config'

export function getEnv(key: string): string
export function getEnv<T>(key: string, parser: (value: string) => T): T
export function getEnv<T>(key: string, parser?: (value: string) => T): T | string {
    const value = process.env[key]
    if (!value) throw new Error(`env variable ${key} not found`)
    if (!parser) return value
    return parser(value)
}
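Usage sketch (not part of this commit; the variable names are placeholders):

// hypothetical example: the optional parser converts the raw string
import { getEnv } from './utils/misc.ts'

const apiKey = getEnv('SOME_API_KEY') // string, throws if unset
const port = getEnv('PORT', Number) // number
const tags = getEnv('TAGS', v => v.split(',')) // string[]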
utils/navidrome.ts · new file · 32 lines
@@ -0,0 +1,32 @@
import { z } from 'zod'
import { ffetch as ffetchBase } from './fetch.ts'
import { getEnv } from './misc.ts'

export const navidromeFfetch = ffetchBase.extend({
    baseUrl: getEnv('NAVIDROME_ENDPOINT'),
    headers: {
        'x-nd-authorization': `Bearer ${getEnv('NAVIDROME_TOKEN')}`,
    },
})

export const NavidromeSong = z.object({
    id: z.string(),
    title: z.string(),
    album: z.string(),
    albumArtist: z.string(),
    artist: z.string(),
    path: z.string(),
    duration: z.number(),
})
export type NavidromeSong = z.infer<typeof NavidromeSong>

export function fetchSongs(offset: number, pageSize: number) {
    return navidromeFfetch('/api/song', {
        query: {
            _start: offset,
            _end: offset + pageSize,
            _order: 'ASC',
            _sort: 'title',
        },
    }).parsedJson(z.array(NavidromeSong))
}
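Usage sketch (not part of this commit; requires NAVIDROME_ENDPOINT and NAVIDROME_TOKEN):

// hypothetical example: page through the whole library
import { fetchSongs } from './utils/navidrome.ts'

const pageSize = 500
for (let offset = 0; ; offset += pageSize) {
    const page = await fetchSongs(offset, pageSize)
    if (page.length === 0) break
    for (const song of page) {
        console.log(`${song.artist} - ${song.title}`)
    }
}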
utils/oauth.ts · new file · 78 lines
@@ -0,0 +1,78 @@
import type { MaybePromise } from '@fuman/utils'
import * as fsp from 'node:fs/promises'
import { z } from 'zod'

export interface OauthStorage {
    write: (value: string) => MaybePromise<void>
    read: () => MaybePromise<string | null>
}

export class LocalOauthStorage implements OauthStorage {
    constructor(private filename: string) {}

    async write(value: string) {
        await fsp.writeFile(this.filename, value)
    }

    async read() {
        try {
            return await fsp.readFile(this.filename, 'utf8')
        } catch (e) {
            return null
        }
    }
}

const OauthState = z.object({
    accessToken: z.string(),
    refreshToken: z.string().optional(),
    expiresAt: z.number(),
})
type OauthState = z.infer<typeof OauthState>

export class OauthHandler {
    constructor(private params: {
        storage: OauthStorage
        refreshToken: (refreshToken: string) => MaybePromise<{
            accessToken: string
            refreshToken: string
            expiresIn: number
        }>
        /** number of milliseconds to subtract from token expiration time */
        jitter?: number
    }) {
        this.params.jitter = this.params.jitter ?? 5000
    }

    #cache: OauthState | null = null
    async readOauthState() {
        if (this.#cache) return this.#cache
        const value = await this.params.storage.read()
        if (!value) return null

        return OauthState.parse(JSON.parse(value))
    }

    async writeOauthState(value: OauthState) {
        this.#cache = value
        await this.params.storage.write(JSON.stringify(value))
    }

    async getAccessToken() {
        const state = await this.readOauthState()
        if (!state) return null

        if (state.expiresAt < Date.now() + this.params.jitter!) {
            if (!state.refreshToken) return null
            const { accessToken, refreshToken, expiresIn } = await this.params.refreshToken(state.refreshToken)
            await this.writeOauthState({
                accessToken,
                refreshToken,
                expiresAt: Date.now() + expiresIn * 1000,
            })
            return accessToken
        }

        return state.accessToken
    }
}
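Usage sketch (not part of this commit; the token endpoint, its request/response shape and the storage path are placeholders):

// hypothetical example: wire OauthHandler to a file-backed storage and a refresh endpoint
import { z } from 'zod'
import { ffetch } from './utils/fetch.ts'
import { LocalOauthStorage, OauthHandler } from './utils/oauth.ts'

const oauth = new OauthHandler({
    storage: new LocalOauthStorage('assets/some-service.oauth.json'),
    refreshToken: async refreshToken => {
        const res = await ffetch.post('https://example.com/oauth/token', {
            json: { grant_type: 'refresh_token', refresh_token: refreshToken },
        }).parsedJson(z.object({
            access_token: z.string(),
            refresh_token: z.string(),
            expires_in: z.number(),
        }))
        return {
            accessToken: res.access_token,
            refreshToken: res.refresh_token,
            expiresIn: res.expires_in,
        }
    },
})

const token = await oauth.getAccessToken() // null until an initial state is written via writeOauthState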
utils/telegram.ts · new file · 11 lines
@@ -0,0 +1,11 @@
import { TelegramClient, type TelegramClientOptions } from '@mtcute/node'
import { getEnv } from './misc.ts'

export function createTg(session: string, extra?: Partial<TelegramClientOptions>) {
    return new TelegramClient({
        apiId: getEnv('TELEGRAM_API_ID', Number),
        apiHash: getEnv('TELEGRAM_API_HASH'),
        storage: `assets/${session}.session`,
        ...extra,
    })
}
utils/webdav.ts · new file · 324 lines
@@ -0,0 +1,324 @@
import { ffetchBase, type FfetchResult } from '@fuman/fetch'
import { asNonNull, assert, base64, utf8 } from '@fuman/utils'
import { Parser } from 'htmlparser2'
import { z } from 'zod'

const XML_HEADER = '<?xml version="1.0" encoding="utf-8" ?>'

export interface WebdavClientOptions {
    baseUrl: string
    username?: string
    password?: string
    headers?: Record<string, string>
}

export interface WebdavResourceBase {
    href: string
    name: string
    status: string
    lastModified?: Date
    raw: Record<string, unknown>
    // todo: lockdiscovery
    // todo: supportedlock
}

export interface WebdavCollection extends WebdavResourceBase {
    type: 'collection'
}

export interface WebdavFile extends WebdavResourceBase {
    type: 'file'
    size: number
    etag?: string
    contentType?: string
}

export type WebdavResource = WebdavCollection | WebdavFile

const DResponseSchema = z.object({
    'd:href': z.string(),
    'd:propstat': z.object({
        'd:prop': z.object({
            'd:resourcetype': z.union([
                z.literal(true),
                z.object({
                    'd:collection': z.literal(true),
                }),
            ]),
            'd:displayname': z.union([z.literal(true), z.string()]),
            'd:getcontentlength': z.coerce.number().optional(),
            'd:getlastmodified': z.string().transform(v => new Date(v)).optional(),
            'd:getetag': z.string().optional(),
            'd:getcontenttype': z.string().optional(),
        }).passthrough(),
        'd:status': z.string(),
    }),
})

const DMultistatusSchema = z.object({
    'd:multistatus': z.tuple([z.object({
        'd:response': z.array(DResponseSchema),
    })]),
})

function escapeXml(str: string) {
    return str.replace(/</g, '&lt;').replace(/>/g, '&gt;')
}

function xmlToJson(xml: string) {
    const res: Record<string, any[]> = {}

    const stack: any[] = [res]

    const parser = new Parser({
        onopentag(name) {
            name = name.toLowerCase()

            const node: any = {}
            const top = stack[stack.length - 1]
            if (!top[name]) {
                top[name] = []
            }
            top[name].push(node)
            stack.push(node)
        },
        onclosetag(name) {
            const obj = stack.pop()
            const top = stack[stack.length - 1]
            const ourIdx = top[name].length - 1

            const keys = Object.keys(obj)
            if (keys.length === 1 && keys[0] === '_text') {
                top[name][ourIdx] = obj._text
            } else if (keys.length === 0) {
                top[name][ourIdx] = true
            } else {
                // replace one-element arrays with the element itself
                for (const key of keys) {
                    if (key === 'd:response') continue
                    const val = obj[key]
                    if (Array.isArray(val) && val.length === 1) {
                        obj[key] = val[0]
                    }
                }
            }
        },
        ontext(text) {
            const top = stack[stack.length - 1]
            if (top._text === undefined) {
                top._text = ''
            }
            top._text += text
        },
    })

    parser.write(xml)
    parser.end()

    return res
}

export class WebdavClient {
    readonly ffetch: typeof ffetchBase
    readonly basePath

    constructor(options: WebdavClientOptions) {
        const headers: Record<string, string> = {
            'Content-Type': 'application/xml; charset="utf-8"',
            ...options.headers,
        }
        if (options.username) {
            let authStr = options.username
            if (options.password) {
                authStr += `:${options.password}`
            }
            headers.Authorization = `Basic ${base64.encode(utf8.encoder.encode(authStr))}`
        }

        this.ffetch = ffetchBase.extend({
            baseUrl: options.baseUrl,
            headers,
        })
        this.basePath = new URL(options.baseUrl).pathname
        if (this.basePath[this.basePath.length - 1] !== '/') {
            this.basePath += '/'
        }
    }

    mapPropfindResponse = (obj: z.infer<typeof DResponseSchema>): WebdavResource => {
        const name = obj['d:propstat']['d:prop']['d:displayname']
        const base: WebdavResourceBase = {
            href: obj['d:href'],
            name: name === true ? '' : name,
            status: obj['d:propstat']['d:status'],
            lastModified: obj['d:propstat']['d:prop']['d:getlastmodified'],
            raw: obj['d:propstat']['d:prop'],
        }
        if (base.href.startsWith(this.basePath)) {
            base.href = base.href.slice(this.basePath.length)
            if (base.href !== '/') {
                base.href = `/${base.href}`
            }
        }

        if (typeof obj['d:propstat']['d:prop']['d:resourcetype'] === 'object' && obj['d:propstat']['d:prop']['d:resourcetype']['d:collection']) {
            const res = base as WebdavCollection
            res.type = 'collection'
            return res
        } else {
            const res = base as WebdavFile
            res.type = 'file'
            res.size = asNonNull(obj['d:propstat']['d:prop']['d:getcontentlength'])
            res.etag = obj['d:propstat']['d:prop']['d:getetag']
            res.contentType = obj['d:propstat']['d:prop']['d:getcontenttype']
            return res
        }
    }

    async propfind(
        path: string,
        params?: {
            depth?: number | 'infinity'
            properties?: string[]
        },
    ): Promise<WebdavResource[]> {
        const body = params?.properties
            ? [
                XML_HEADER,
                '<d:propfind xmlns:d="DAV:">',
                '<d:prop>',
                ...params.properties.map(prop => `<${prop}/>`),
                '</d:prop>',
                '</d:propfind>',
            ].join('\n')
            : undefined
        const res = await this.ffetch(path, {
            method: 'PROPFIND',
            headers: {
                Depth: params?.depth ? String(params.depth) : '1',
            },
            body,
        }).text()

        const json = DMultistatusSchema.parse(xmlToJson(res))
        return json['d:multistatus'][0]['d:response'].map(this.mapPropfindResponse)
    }

    async proppatch(path: string, params: {
        set?: Record<string, string | { _xml: string }>
        remove?: string[]
    }): Promise<void> {
        if (!params.set && !params.remove) return

        const lines: string[] = [
            XML_HEADER,
            '<d:propertyupdate xmlns:d="DAV:">',
        ]
        if (params.set) {
            lines.push('<d:set>')
            for (const [key, value] of Object.entries(params.set ?? {})) {
                lines.push(`<d:prop><${key}>${
                    typeof value === 'object' ? value._xml : escapeXml(value)
                }</${key}></d:prop>`)
            }
            lines.push('</d:set>')
        }
        if (params.remove) {
            lines.push('<d:remove>')
            for (const key of params.remove) {
                lines.push(`<d:prop><${key}/></d:prop>`)
            }
            lines.push('</d:remove>')
        }
        lines.push('</d:propertyupdate>')

        const body = lines.join('\n')
        await this.ffetch(path, {
            method: 'PROPPATCH',
            body,
        })
    }

    async mkcol(path: string): Promise<void> {
        const res = await this.ffetch(path, {
            method: 'MKCOL',
        })
        if (res.status !== 201) throw new Error(`mkcol failed: ${res.status}`)
    }

    async delete(path: string): Promise<void> {
        const res = await this.ffetch(path, {
            method: 'DELETE',
        })
        if (res.status !== 204) throw new Error(`delete failed: ${res.status}`)
    }

    get(path: string): FfetchResult {
        return this.ffetch(path, {
            method: 'GET',
        })
    }

    async put(path: string, body: BodyInit): Promise<void> {
        await this.ffetch(path, {
            method: 'PUT',
            body,
        })
    }

    async copy(
        source: string,
        destination: string,
        params?: {
            /** whether to overwrite the destination if it exists */
            overwrite?: boolean
            depth?: number | 'infinity'
        },
    ): Promise<void> {
        if (destination[0] === '/') destination = destination.slice(1)
        if (this.basePath) destination = this.basePath + destination
        const headers: Record<string, string> = {
            Destination: destination,
        }
        if (params?.overwrite !== true) {
            headers.Overwrite = 'F'
        }
        if (params?.depth) {
            headers.Depth = String(params.depth)
        }

        const res = await this.ffetch(source, {
            method: 'COPY',
            headers,
        })
        if (res.status !== 201) throw new Error(`copy failed: ${res.status}`)
    }

    async move(
        source: string,
        destination: string,
        params?: {
            /** whether to overwrite the destination if it exists */
            overwrite?: boolean
            depth?: number | 'infinity'
        },
    ): Promise<void> {
        if (destination[0] === '/') destination = destination.slice(1)
        if (this.basePath) destination = this.basePath + destination
        const headers: Record<string, string> = {
            Destination: destination,
        }
        if (params?.overwrite !== true) {
            headers.Overwrite = 'F'
        }
        if (params?.depth) {
            headers.Depth = String(params.depth)
        }

        const res = await this.ffetch(source, {
            method: 'MOVE',
            headers,
        })
        if (res.status !== 201) throw new Error(`move failed: ${res.status}`)
    }
}
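Usage sketch (not part of this commit; the server URL and credentials are placeholders):

// hypothetical example: list a directory, create a collection, upload a file
import { WebdavClient } from './utils/webdav.ts'

const dav = new WebdavClient({
    baseUrl: 'https://dav.example.com/remote.php/dav/files/user/',
    username: 'user',
    password: 'password',
})

const entries = await dav.propfind('/', { depth: 1 })
for (const entry of entries) {
    console.log(entry.type === 'collection' ? `[dir] ${entry.href}` : `${entry.href} (${entry.size} bytes)`)
}

await dav.mkcol('/backups')
await dav.put('/backups/notes.txt', 'hello')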
utils/xml.ts · new file · 20 lines
@@ -0,0 +1,20 @@
import type { ChildNode } from 'domhandler'
import { DomHandler } from 'domhandler'
import { Parser } from 'htmlparser2'

export function xmlToDom(xml: string) {
    let _error: Error | null = null
    let _dom: ChildNode[] | null = null

    const handler = new DomHandler((error, dom) => {
        _error = error
        _dom = dom
    })
    const parser = new Parser(handler)
    parser.write(xml)
    parser.end()

    if (_error) throw _error

    return _dom!
}