mirror of
https://git.stupid.fish/teidesu/scripts.git
synced 2025-07-28 02:32:11 +10:00
chore: update public repo
This commit is contained in:
parent
e0109980c0
commit
e7c9507247
25 changed files with 5364 additions and 0 deletions
113
utils/currency.ts
Normal file
113
utils/currency.ts
Normal file
|
@ -0,0 +1,113 @@
|
|||
import { asyncPool } from '@fuman/utils'
|
||||
|
||||
import { z } from 'zod'
|
||||
import { ffetch } from './fetch.ts'
|
||||
import { getEnv } from './misc.ts'
|
||||
|
||||
// token management

// comma-separated openexchangerates.org app IDs
// NOTE(review): assumes OXR_TOKENS is set and non-empty — getEnv behavior not visible here, TODO confirm
const TOKENS = getEnv('OXR_TOKENS').split(',')

// api token => requests remaining
// (populated lazily from the /api/usage.json endpoint in fetchMissingPairs)
const usageAvailable = new Map<string, number>()
|
||||
function getToken() {
|
||||
// find token with the most requests remaining
|
||||
const token = TOKENS.find(t => usageAvailable.get(t)! > 0)
|
||||
if (!token) throw new Error('no tokens available')
|
||||
|
||||
// consume 1 request
|
||||
usageAvailable.set(token, usageAvailable.get(token)! - 1)
|
||||
|
||||
return token
|
||||
}
|
||||
|
||||
// base => other => value
// i.e. data.get(base)![other] is the rate such that `amount * rate`
// converts `amount` of base into `other` (see convertCurrenciesBatch)
// NB: ideally we should have expiration and persistence on this
const data = new Map<string, Record<string, number>>()
|
||||
|
||||
async function fetchMissingPairs(list: { from: string, to: string }[]) {
|
||||
const missing = list.filter(c => !data.has(c.from) && !data.has(c.to) && c.from !== c.to)
|
||||
if (missing.length === 0) return
|
||||
|
||||
const basesToFetch = new Set<string>()
|
||||
|
||||
for (const { from, to } of missing) {
|
||||
if (!basesToFetch.has(from) && !basesToFetch.has(to)) {
|
||||
basesToFetch.add(from)
|
||||
}
|
||||
}
|
||||
|
||||
if (!usageAvailable.size) {
|
||||
// NB: ideally we should lock here for a production-ready implementation
|
||||
|
||||
// fetch usage for all tokens
|
||||
await asyncPool(TOKENS, async (token) => {
|
||||
const res = await ffetch('https://openexchangerates.org/api/usage.json', {
|
||||
query: {
|
||||
app_id: token,
|
||||
},
|
||||
}).parsedJson(z.object({
|
||||
status: z.literal(200),
|
||||
data: z.object({
|
||||
app_id: z.string(),
|
||||
status: z.literal('active'),
|
||||
usage: z.object({
|
||||
requests_remaining: z.number(),
|
||||
}),
|
||||
}),
|
||||
}))
|
||||
|
||||
usageAvailable.set(token, res.data.usage.requests_remaining)
|
||||
}, { onError: () => 'ignore' })
|
||||
|
||||
if (!usageAvailable.size) {
|
||||
throw new Error('failed to fetch usage, are all tokens dead?')
|
||||
}
|
||||
}
|
||||
|
||||
// console.log('will fetch bases:', [...basesToFetch])
|
||||
|
||||
await asyncPool(basesToFetch, async (base) => {
|
||||
const res = await ffetch('https://openexchangerates.org/api/latest.json', {
|
||||
query: {
|
||||
app_id: getToken(),
|
||||
},
|
||||
}).parsedJson(z.object({
|
||||
rates: z.record(z.string(), z.number()),
|
||||
}))
|
||||
|
||||
data.set(base, res.rates)
|
||||
})
|
||||
}
|
||||
|
||||
export async function convertCurrenciesBatch(list: { from: string, to: string, amount: number }[]) {
|
||||
await fetchMissingPairs(list)
|
||||
const ret: { from: string, to: string, amount: number, converted: number }[] = []
|
||||
|
||||
for (const { from, to, amount } of list) {
|
||||
let result: number
|
||||
|
||||
if (from === to) {
|
||||
result = amount
|
||||
} else if (data.has(from)) {
|
||||
const rate = data.get(from)![to]!
|
||||
if (!rate) throw new Error(`rate unavailable: ${from} -> ${to}`)
|
||||
result = amount * rate
|
||||
// console.log('converted from', from, 'to', to, 'amount', amount, 'result', result, 'rate', rate)
|
||||
} else if (data.has(to)) {
|
||||
const rate = data.get(to)![from]!
|
||||
if (!rate) throw new Error(`rate unavailable: ${from} -> ${to}`)
|
||||
result = amount / rate
|
||||
// console.log('converted rev from', from, 'to', to, 'amount', amount, 'result', result, 'rate', rate)
|
||||
} else {
|
||||
throw new Error(`rate unavailable: ${from} -> ${to}`)
|
||||
}
|
||||
|
||||
ret.push({
|
||||
from,
|
||||
to,
|
||||
amount,
|
||||
converted: result,
|
||||
})
|
||||
}
|
||||
|
||||
return ret
|
||||
}
|
Loading…
Add table
Add a link
Reference in a new issue